[ 485.837023] env[62508]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62508) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 485.837370] env[62508]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62508) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 485.837493] env[62508]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62508) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 485.837824] env[62508]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 485.937595] env[62508]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62508) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:383}}
[ 485.949916] env[62508]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.012s {{(pid=62508) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:421}}
[ 486.549181] env[62508]: INFO nova.virt.driver [None req-e98a8e74-0567-4149-a407-6e82eb45d4a0 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 486.620616] env[62508]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 486.620773] env[62508]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 486.620874] env[62508]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62508) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 489.920746] env[62508]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-f423e4d0-6056-4e95-b2e0-bdc82ba24a41 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 489.936809] env[62508]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62508) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 489.936934] env[62508]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-33866181-9191-4430-8c90-ebf4f33c4ad1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 489.961743] env[62508]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 87a76.
[ 489.961903] env[62508]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.341s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 489.962425] env[62508]: INFO nova.virt.vmwareapi.driver [None req-e98a8e74-0567-4149-a407-6e82eb45d4a0 None None] VMware vCenter version: 7.0.3
[ 489.965881] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79392dbe-7354-48f1-91e7-d475c51cf597 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 489.983066] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e65bb3-b3d9-45a1-a08c-50fe51f81a2b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 489.988977] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa064010-6960-41a5-8294-2d2e65e6acc2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 489.995628] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5f47b5-78fb-446f-97fa-bcd37920695a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 490.008657] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc40dd2-b95a-41c3-aeaf-4b70f1673f74 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 490.014585] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-755645da-4946-4712-b003-7463a94c77f9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 490.045394] env[62508]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-35b32056-ea4c-4e35-984f-9afe22179a40 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 490.050907] env[62508]: DEBUG nova.virt.vmwareapi.driver [None req-e98a8e74-0567-4149-a407-6e82eb45d4a0 None None] Extension org.openstack.compute already exists. {{(pid=62508) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:226}}
[ 490.053618] env[62508]: INFO nova.compute.provider_config [None req-e98a8e74-0567-4149-a407-6e82eb45d4a0 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 490.557258] env[62508]: DEBUG nova.context [None req-e98a8e74-0567-4149-a407-6e82eb45d4a0 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),a5927543-5702-4c5e-9b22-bcdd88b60779(cell1) {{(pid=62508) load_cells /opt/stack/nova/nova/context.py:464}}
[ 490.559401] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 490.559664] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 490.560386] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 490.560845] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] Acquiring lock "a5927543-5702-4c5e-9b22-bcdd88b60779" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 490.561057] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] Lock "a5927543-5702-4c5e-9b22-bcdd88b60779" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 490.562087] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] Lock "a5927543-5702-4c5e-9b22-bcdd88b60779" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 490.583560] env[62508]: INFO dbcounter [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] Registered counter for database nova_cell0
[ 490.591811] env[62508]: INFO dbcounter [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] Registered counter for database nova_cell1
[ 490.595072] env[62508]: DEBUG oslo_db.sqlalchemy.engines [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62508) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 490.595418] env[62508]: DEBUG oslo_db.sqlalchemy.engines [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62508) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 490.600266] env[62508]: ERROR nova.db.main.api [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 490.600266] env[62508]: result = function(*args, **kwargs)
[ 490.600266] env[62508]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 490.600266] env[62508]: return func(*args, **kwargs)
[ 490.600266] env[62508]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 490.600266] env[62508]: result = fn(*args, **kwargs)
[ 490.600266] env[62508]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 490.600266] env[62508]: return f(*args, **kwargs)
[ 490.600266] env[62508]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 490.600266] env[62508]: return db.service_get_minimum_version(context, binaries)
[ 490.600266] env[62508]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 490.600266] env[62508]: _check_db_access()
[ 490.600266] env[62508]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 490.600266] env[62508]: stacktrace = ''.join(traceback.format_stack())
[ 490.600266] env[62508]:
[ 490.601250] env[62508]: ERROR nova.db.main.api [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 490.601250] env[62508]: result = function(*args, **kwargs)
[ 490.601250] env[62508]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 490.601250] env[62508]: return func(*args, **kwargs)
[ 490.601250] env[62508]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 490.601250] env[62508]: result = fn(*args, **kwargs)
[ 490.601250] env[62508]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 490.601250] env[62508]: return f(*args, **kwargs)
[ 490.601250] env[62508]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 490.601250] env[62508]: return db.service_get_minimum_version(context, binaries)
[ 490.601250] env[62508]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 490.601250] env[62508]: _check_db_access()
[ 490.601250] env[62508]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 490.601250] env[62508]: stacktrace = ''.join(traceback.format_stack())
[ 490.601250] env[62508]:
[ 490.601920] env[62508]: WARNING nova.objects.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 490.601920] env[62508]: WARNING nova.objects.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] Failed to get minimum service version for cell a5927543-5702-4c5e-9b22-bcdd88b60779
[ 490.602192] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] Acquiring lock "singleton_lock" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 490.602358] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] Acquired lock "singleton_lock" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [
490.602597] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] Releasing lock "singleton_lock" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 490.602919] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] Full set of CONF: {{(pid=62508) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 490.603076] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ******************************************************************************** {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 490.603207] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] Configuration options gathered from: {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 490.603343] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 490.603529] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 490.603658] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ================================================================================ {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 490.603867] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] allow_resize_to_same_host = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.604048] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] arq_binding_timeout = 300 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.604181] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] backdoor_port = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.604309] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] backdoor_socket = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.604473] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] block_device_allocate_retries = 60 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.604633] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] block_device_allocate_retries_interval = 3 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.604805] env[62508]: DEBUG 
oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cert = self.pem {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.604972] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.605155] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] compute_monitors = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.605325] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] config_dir = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.605491] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] config_drive_format = iso9660 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.605626] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.605789] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] config_source = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.605955] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] console_host = devstack {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.606131] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] control_exchange = nova {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.606291] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cpu_allocation_ratio = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.606453] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] daemon = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.606617] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] debug = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.606777] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] default_access_ip_network_name = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.606947] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] default_availability_zone = nova {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.607110] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] default_ephemeral_format = 
None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.607272] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] default_green_pool_size = 1000 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.607511] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.607736] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] default_schedule_zone = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.607913] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] disk_allocation_ratio = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.608089] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] enable_new_services = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.608277] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] enabled_apis = ['osapi_compute'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.608442] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] enabled_ssl_apis = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.608606] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] flat_injected = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.608771] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] force_config_drive = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.608931] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] force_raw_images = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.609110] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] graceful_shutdown_timeout = 5 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.609275] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] heal_instance_info_cache_interval = 60 {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.609499] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] host = cpu-1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.609686] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.609867] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] initial_disk_allocation_ratio = 1.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.610045] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] initial_ram_allocation_ratio = 1.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.610260] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.610423] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] instance_build_timeout = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.610585] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] instance_delete_interval = 300 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.610784] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] instance_format = [instance: %(uuid)s] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.610965] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] instance_name_template = instance-%08x {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.611146] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] instance_usage_audit = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.611325] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] instance_usage_audit_period = month {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.611491] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.611659] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] instances_path = /opt/stack/data/nova/instances {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.611825] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] internal_service_availability_zone = internal {{(pid=62508) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.611984] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] key = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.612160] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] live_migration_retry_count = 30 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.612329] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] log_color = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.612492] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] log_config_append = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.612657] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.612820] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] log_dir = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.612979] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] log_file = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.613120] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] log_options = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.613285] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] log_rotate_interval = 1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.613450] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] log_rotate_interval_type = days {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.613617] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] log_rotation_type = none {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.613744] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.613873] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.614049] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.614219] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.614382] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.614558] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] long_rpc_timeout = 1800 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.614721] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] max_concurrent_builds = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.614880] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] max_concurrent_live_migrations = 1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.615048] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] max_concurrent_snapshots = 5 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.615211] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] max_local_block_devices = 3 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.615369] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] max_logfile_count = 30 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.615522] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] max_logfile_size_mb = 200 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.615683] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] maximum_instance_delete_attempts = 5 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.615854] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] metadata_listen = 0.0.0.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.616035] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] metadata_listen_port = 8775 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.616205] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] metadata_workers = 2 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.616366] env[62508]: DEBUG oslo_service.service 
[None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] migrate_max_retries = -1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.616532] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] mkisofs_cmd = genisoimage {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.616744] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] my_block_storage_ip = 10.180.1.21 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.616879] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] my_ip = 10.180.1.21 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.617053] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] network_allocate_retries = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.617237] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.617406] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] osapi_compute_listen = 0.0.0.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.617576] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] osapi_compute_listen_port = 8774 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.617749] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] osapi_compute_unique_server_name_scope = {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.617921] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] osapi_compute_workers = 2 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.618101] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] password_length = 12 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.618271] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] periodic_enable = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.618433] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] periodic_fuzzy_delay = 60 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.618602] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] pointer_model = usbtablet {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.618769] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] preallocate_images = none {{(pid=62508) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.618934] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] publish_errors = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.619077] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] pybasedir = /opt/stack/nova {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.619237] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ram_allocation_ratio = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.619398] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] rate_limit_burst = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.619564] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] rate_limit_except_level = CRITICAL {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.619751] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] rate_limit_interval = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.619924] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] reboot_timeout = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.620099] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] reclaim_instance_interval = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.620260] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] record = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.620431] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] reimage_timeout_per_gb = 60 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.620594] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] report_interval = 120 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.620783] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] rescue_timeout = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.620951] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] reserved_host_cpus = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.621126] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] reserved_host_disk_mb = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.621288] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 
None None] reserved_host_memory_mb = 512 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.621446] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] reserved_huge_pages = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.621603] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] resize_confirm_window = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.621763] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] resize_fs_using_block_device = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.621924] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] resume_guests_state_on_host_boot = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.622104] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.622267] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] rpc_response_timeout = 60 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.622428] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] run_external_periodic_tasks = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.622598] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] running_deleted_instance_action = reap {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.622760] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] running_deleted_instance_poll_interval = 1800 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.622921] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] running_deleted_instance_timeout = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.623090] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] scheduler_instance_sync_interval = 120 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.623262] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] service_down_time = 720 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.623431] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] servicegroup_driver = db {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.623587] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] shell_completion = None {{(pid=62508) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.623748] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] shelved_offload_time = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.623908] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] shelved_poll_interval = 3600 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.624233] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] shutdown_timeout = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.624351] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] source_is_ipv6 = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.624576] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ssl_only = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.624902] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.625147] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] sync_power_state_interval = 600 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.625360] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] sync_power_state_pool_size = 1000 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.625578] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] syslog_log_facility = LOG_USER {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.625773] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] tempdir = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.626042] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] timeout_nbd = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.626282] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] transport_url = **** {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.626489] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] update_resources_interval = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.626669] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] use_cow_images = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.626851] env[62508]: DEBUG oslo_service.service [None 
req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] use_eventlog = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.627099] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] use_journal = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.627332] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] use_json = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.627546] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] use_rootwrap_daemon = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.627759] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] use_stderr = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.627973] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] use_syslog = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.628197] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vcpu_pin_set = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.628423] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vif_plugging_is_fatal = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.628620] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vif_plugging_timeout = 300 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.628820] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] virt_mkfs = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.628997] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] volume_usage_poll_interval = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.629180] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] watch_log_file = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.629378] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] web = /usr/share/spice-html5 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 490.629626] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.629885] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.630044] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.630229] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_concurrency.disable_process_locking = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.630836] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.631045] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.631228] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.631409] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.631584] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.631760] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.631952] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.auth_strategy = keystone {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.632141] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.compute_link_prefix = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.632329] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.632510] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.dhcp_domain = novalocal {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.632687] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.enable_instance_password = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.633440] 
env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.glance_link_prefix = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.633440] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.633440] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.633440] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.instance_list_per_project_cells = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.633592] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.list_records_by_skipping_down_cells = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.633685] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.local_metadata_per_cell = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.633898] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.max_limit = 1000 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.634117] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.metadata_cache_expiration = 15 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.634345] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.neutron_default_tenant_id = default {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.634551] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.response_validation = warn {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.634747] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.use_neutron_default_nets = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.634926] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.635109] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.635283] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.635462] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.635646] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.vendordata_dynamic_targets = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.635817] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.vendordata_jsonfile_path = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.636008] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.636212] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.backend = dogpile.cache.memcached {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.636384] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.backend_argument = **** {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.636549] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.backend_expiration_time = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.636724] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.config_prefix = cache.oslo {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.636907] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.dead_timeout = 60.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.637083] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.debug_cache_backend = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.637253] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.enable_retry_client = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.637473] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.enable_socket_keepalive = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.637693] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.enabled = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.637876] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.enforce_fips_mode = False {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.638060] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.expiration_time = 600 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.638235] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.hashclient_retry_attempts = 2 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.638404] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.hashclient_retry_delay = 1.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.638569] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.memcache_dead_retry = 300 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.638732] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.memcache_password = **** {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.638903] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.639088] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.639260] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.memcache_pool_maxsize = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.639430] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.639598] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.memcache_sasl_enabled = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.639803] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.639980] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.memcache_socket_timeout = 1.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.640160] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.memcache_username = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.640331] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.proxies = [] {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.640497] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.redis_db = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.640664] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.redis_password = **** {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.640838] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.redis_sentinel_service_name = mymaster {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.641024] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.641203] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.redis_server = localhost:6379 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.641371] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.redis_socket_timeout = 1.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.641530] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.redis_username = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.641699] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.retry_attempts = 2 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.641870] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.retry_delay = 0.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.642046] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.socket_keepalive_count = 1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.642217] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.socket_keepalive_idle = 1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.642381] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.socket_keepalive_interval = 1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.642542] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.tls_allowed_ciphers = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.642701] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.tls_cafile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.642863] 
env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.tls_certfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.643042] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.tls_enabled = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.643197] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cache.tls_keyfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.643370] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cinder.auth_section = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.643545] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cinder.auth_type = password {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.643705] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cinder.cafile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.643880] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cinder.catalog_info = volumev3::publicURL {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.644045] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cinder.certfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.644218] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cinder.collect_timing = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.644383] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cinder.cross_az_attach = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.644549] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cinder.debug = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.644708] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cinder.endpoint_template = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.644874] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cinder.http_retries = 3 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.645047] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cinder.insecure = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.645211] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cinder.keyfile = None {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.645383] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cinder.os_region_name = RegionOne {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.645551] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cinder.split_loggers = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.645710] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cinder.timeout = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.645883] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.646076] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] compute.cpu_dedicated_set = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.646230] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] compute.cpu_shared_set = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.646396] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] compute.image_type_exclude_list = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.646562] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.646726] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] compute.max_concurrent_disk_ops = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.646891] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] compute.max_disk_devices_to_attach = -1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.647069] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.647242] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.647408] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] compute.resource_provider_association_refresh = 300 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.647588] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] 
compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.647767] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] compute.shutdown_retry_interval = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.647952] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.648147] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] conductor.workers = 2 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.648327] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] console.allowed_origins = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.648490] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] console.ssl_ciphers = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.648661] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] console.ssl_minimum_version = default {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.648834] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] consoleauth.enforce_session_timeout = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.649013] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] consoleauth.token_ttl = 600 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.649194] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cyborg.cafile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.649353] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cyborg.certfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.649519] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cyborg.collect_timing = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.649704] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cyborg.connect_retries = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.649874] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cyborg.connect_retry_delay = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.650047] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cyborg.endpoint_override = None 
{{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.650216] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cyborg.insecure = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.650374] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cyborg.keyfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.650534] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cyborg.max_version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.650699] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cyborg.min_version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.650856] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cyborg.region_name = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.651020] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cyborg.retriable_status_codes = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.651180] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cyborg.service_name = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.651348] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cyborg.service_type = accelerator {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.651513] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cyborg.split_loggers = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.651670] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cyborg.status_code_retries = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.651828] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cyborg.status_code_retry_delay = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.651988] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cyborg.timeout = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.652180] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.652344] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] cyborg.version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
490.652524] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] database.backend = sqlalchemy {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.652695] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] database.connection = **** {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.652864] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] database.connection_debug = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.653044] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] database.connection_parameters = {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.653215] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] database.connection_recycle_time = 3600 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.653381] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] database.connection_trace = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.653543] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] database.db_inc_retry_interval = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.653707] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] database.db_max_retries = 20 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.653873] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] database.db_max_retry_interval = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.654046] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] database.db_retry_interval = 1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.654218] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] database.max_overflow = 50 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.654382] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] database.max_pool_size = 5 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.654544] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] database.max_retries = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.654713] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.654877] env[62508]: DEBUG oslo_service.service [None 
req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] database.mysql_wsrep_sync_wait = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.655048] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] database.pool_timeout = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.655219] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] database.retry_interval = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.655379] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] database.slave_connection = **** {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.655596] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] database.sqlite_synchronous = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.655705] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] database.use_db_reconnect = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.655884] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api_database.backend = sqlalchemy {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.656065] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api_database.connection = **** {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.656236] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api_database.connection_debug = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.656406] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api_database.connection_parameters = {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.656571] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api_database.connection_recycle_time = 3600 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.656736] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api_database.connection_trace = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.656906] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api_database.db_inc_retry_interval = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.657084] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api_database.db_max_retries = 20 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.657255] env[62508]: DEBUG oslo_service.service [None 
req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api_database.db_max_retry_interval = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.657419] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api_database.db_retry_interval = 1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.657600] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api_database.max_overflow = 50 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.657772] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api_database.max_pool_size = 5 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.657938] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api_database.max_retries = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.658127] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.658292] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.658452] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api_database.pool_timeout = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.658618] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api_database.retry_interval = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.658776] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api_database.slave_connection = **** {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.658944] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] api_database.sqlite_synchronous = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.659132] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] devices.enabled_mdev_types = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.659311] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.659481] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ephemeral_storage_encryption.default_format = luks {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.659665] env[62508]: DEBUG oslo_service.service [None 
req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ephemeral_storage_encryption.enabled = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.659833] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.660013] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.api_servers = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.660187] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.cafile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.660375] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.certfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.660524] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.collect_timing = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.660698] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.connect_retries = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.660840] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.connect_retry_delay = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.661015] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.debug = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.661188] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.default_trusted_certificate_ids = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.661352] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.enable_certificate_validation = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.661518] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.enable_rbd_download = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.661677] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.endpoint_override = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.661847] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.insecure = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.662017] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.keyfile = None 
{{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.662184] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.max_version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.662381] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.min_version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.662503] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.num_retries = 3 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.662670] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.rbd_ceph_conf = {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.662837] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.rbd_connect_timeout = 5 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.663009] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.rbd_pool = {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.663191] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.rbd_user = {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.663350] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.region_name = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.663509] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.retriable_status_codes = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.663667] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.service_name = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.663837] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.service_type = image {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.664017] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.split_loggers = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.664178] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.status_code_retries = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.664335] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.status_code_retry_delay = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.664492] env[62508]: DEBUG 
oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.timeout = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.664671] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.664839] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.verify_glance_signatures = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.664999] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] glance.version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.665182] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] guestfs.debug = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.665349] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] mks.enabled = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.665709] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.665902] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] image_cache.manager_interval = 2400 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.666086] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] image_cache.precache_concurrency = 1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.666262] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] image_cache.remove_unused_base_images = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.666434] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.666600] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.666777] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] image_cache.subdirectory_name = _base {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.666956] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.api_max_retries = 60 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.667136] env[62508]: DEBUG 
oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.api_retry_interval = 2 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.667300] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.auth_section = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.667466] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.auth_type = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.667648] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.cafile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.667815] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.certfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.667981] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.collect_timing = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.668160] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.conductor_group = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.668322] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.connect_retries = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.668482] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.connect_retry_delay = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.668641] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.endpoint_override = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.668806] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.insecure = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.668964] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.keyfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.669136] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.max_version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.669294] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.min_version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.669459] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.peer_list = [] {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.669617] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.region_name = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.669797] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.retriable_status_codes = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.669966] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.serial_console_state_timeout = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.670141] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.service_name = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.670313] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.service_type = baremetal {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.670471] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.shard = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.670641] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.split_loggers = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.670803] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.status_code_retries = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.670962] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.status_code_retry_delay = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.671133] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.timeout = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.671316] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.671478] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ironic.version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.671661] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.671836] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] key_manager.fixed_key = **** {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.672057] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.672201] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican.barbican_api_version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.672363] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican.barbican_endpoint = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.672535] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican.barbican_endpoint_type = public {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.673196] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican.barbican_region_name = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.673196] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican.cafile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.673196] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican.certfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.673196] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican.collect_timing = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.673333] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican.insecure = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.673554] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican.keyfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.673644] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican.number_of_retries = 60 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.673795] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican.retry_delay = 1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.673981] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican.send_service_user_token = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.674148] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican.split_loggers = False {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.674295] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican.timeout = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.674456] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican.verify_ssl = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.674615] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican.verify_ssl_path = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.674778] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican_service_user.auth_section = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.674944] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican_service_user.auth_type = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.675117] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican_service_user.cafile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.675279] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican_service_user.certfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.675445] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican_service_user.collect_timing = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.675607] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican_service_user.insecure = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.675792] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican_service_user.keyfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.675930] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican_service_user.split_loggers = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.676112] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] barbican_service_user.timeout = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.676266] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vault.approle_role_id = **** {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.676425] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vault.approle_secret_id = **** {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.676590] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vault.kv_mountpoint = secret {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.676784] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vault.kv_path = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.676906] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vault.kv_version = 2 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.677073] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vault.namespace = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.677230] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vault.root_token_id = **** {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.678080] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vault.ssl_ca_crt_file = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.678080] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vault.timeout = 60.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.678080] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vault.use_ssl = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.678080] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.678080] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] keystone.cafile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.678313] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] keystone.certfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.678393] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] keystone.collect_timing = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.678556] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] keystone.connect_retries = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.678715] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] keystone.connect_retry_delay = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.678873] env[62508]: DEBUG oslo_service.service [None 
req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] keystone.endpoint_override = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.679155] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] keystone.insecure = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.679236] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] keystone.keyfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.679350] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] keystone.max_version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.679504] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] keystone.min_version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.679674] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] keystone.region_name = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.679845] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] keystone.retriable_status_codes = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.680013] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] keystone.service_name = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.680190] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] keystone.service_type = identity {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.680355] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] keystone.split_loggers = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.680516] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] keystone.status_code_retries = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.680676] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] keystone.status_code_retry_delay = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.680837] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] keystone.timeout = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.681025] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.681193] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] keystone.version = None 
{{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.681390] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.connection_uri = {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.681554] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.cpu_mode = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.681721] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.cpu_model_extra_flags = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.681889] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.cpu_models = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.682072] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.cpu_power_governor_high = performance {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.682244] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.cpu_power_governor_low = powersave {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.682408] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.cpu_power_management = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.682579] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.682773] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.device_detach_attempts = 8 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.682957] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.device_detach_timeout = 20 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.683143] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.disk_cachemodes = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.683306] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.disk_prefix = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.683472] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.enabled_perf_events = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.683636] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.file_backed_memory = 0 {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.683803] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.gid_maps = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.683962] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.hw_disk_discard = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.684133] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.hw_machine_type = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.684302] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.images_rbd_ceph_conf = {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.684468] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.684633] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.684802] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.images_rbd_glance_store_name = {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.684971] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.images_rbd_pool = rbd {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.685155] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.images_type = default {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.685320] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.images_volume_group = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.685485] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.inject_key = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.685642] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.inject_partition = -2 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.685807] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.inject_password = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.685973] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.iscsi_iface = None {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.686151] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.iser_use_multipath = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.686320] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.live_migration_bandwidth = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.686484] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.686648] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.live_migration_downtime = 500 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.686815] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.686978] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.687157] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.live_migration_inbound_addr = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.687322] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.687485] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.live_migration_permit_post_copy = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.687686] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.live_migration_scheme = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.687879] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.live_migration_timeout_action = abort {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.688065] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.live_migration_tunnelled = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.688236] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.live_migration_uri = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.688402] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.688564] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.max_queues = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.688730] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.688970] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.689150] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.nfs_mount_options = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.689464] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.689638] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.689824] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.num_iser_scan_tries = 5 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.689993] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.num_memory_encrypted_guests = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.690173] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.690338] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.num_pcie_ports = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.690503] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.num_volume_scan_tries = 5 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.690674] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.pmem_namespaces = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.690835] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.quobyte_client_cfg = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.691232] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.691479] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.rbd_connect_timeout = 5 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.691603] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.691813] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.691984] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.rbd_secret_uuid = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.692166] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.rbd_user = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.692334] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.692507] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.remote_filesystem_transport = ssh {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.692669] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.rescue_image_id = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.692829] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.rescue_kernel_id = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.692987] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.rescue_ramdisk_id = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.693170] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.693331] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.rx_queue_size = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.693497] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.smbfs_mount_options = {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.693797] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.693972] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.snapshot_compression = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.694146] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.snapshot_image_format = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.694776] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.694776] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.sparse_logical_volumes = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.694776] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.swtpm_enabled = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.694893] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.swtpm_group = tss {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.695017] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.swtpm_user = tss {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.695192] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.sysinfo_serial = unique {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.695372] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.tb_cache_size = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.695500] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.tx_queue_size = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.695660] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.uid_maps = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699021] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.use_virtio_for_bridges = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699021] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.virt_type = kvm {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699021] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.volume_clear = zero 
{{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699021] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.volume_clear_size = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699021] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.volume_use_multipath = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699021] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.vzstorage_cache_path = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699021] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699249] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.vzstorage_mount_group = qemu {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699249] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.vzstorage_mount_opts = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699249] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699249] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699249] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.vzstorage_mount_user = stack {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699249] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699249] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.auth_section = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699417] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.auth_type = password {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699417] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.cafile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699417] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.certfile = None 
{{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699417] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.collect_timing = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699417] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.connect_retries = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699417] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.connect_retry_delay = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699417] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.default_floating_pool = public {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699594] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.endpoint_override = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699594] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.extension_sync_interval = 600 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699787] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.http_retries = 3 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.699944] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.insecure = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.700110] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.keyfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.700264] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.max_version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.700426] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.700581] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.min_version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.700754] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.ovs_bridge = br-int {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.700916] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.physnets = [] {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.701091] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.region_name = RegionOne {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.701249] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.retriable_status_codes = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.701414] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.service_metadata_proxy = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.701571] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.service_name = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.701734] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.service_type = network {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.701893] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.split_loggers = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.702057] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.status_code_retries = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.702213] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.status_code_retry_delay = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.702367] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.timeout = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.702540] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.702700] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] neutron.version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.702928] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] notifications.bdms_in_notifications = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.703146] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] notifications.default_level = INFO {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.703327] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] notifications.notification_format = unversioned {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.703494] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] notifications.notify_on_state_change = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.703671] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.703850] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] pci.alias = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.704027] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] pci.device_spec = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.704199] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] pci.report_in_placement = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.704367] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.auth_section = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.704541] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.auth_type = password {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.704711] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.704875] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.cafile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.705046] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.certfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.705215] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.collect_timing = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.705376] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.connect_retries = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.705536] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.connect_retry_delay = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.705694] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.default_domain_id = None {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.705939] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.default_domain_name = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.706140] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.domain_id = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.706307] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.domain_name = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.706466] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.endpoint_override = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.706629] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.insecure = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.706785] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.keyfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.706939] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.max_version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.707109] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.min_version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.707279] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.password = **** {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.707435] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.project_domain_id = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.707625] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.project_domain_name = Default {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.707805] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.project_id = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.707981] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.project_name = service {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.708167] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.region_name = RegionOne {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.708331] 
env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.retriable_status_codes = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.708484] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.service_name = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.708647] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.service_type = placement {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.708813] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.split_loggers = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.708975] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.status_code_retries = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.709150] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.status_code_retry_delay = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.709310] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.system_scope = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.709469] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.timeout = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.709631] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.trust_id = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.709813] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.user_domain_id = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.709988] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.user_domain_name = Default {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.710167] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.user_id = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.710344] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.username = nova {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.710521] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.710684] env[62508]: DEBUG oslo_service.service [None 
req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] placement.version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.710894] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] quota.cores = 20 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.711074] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] quota.count_usage_from_placement = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.711248] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.711417] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] quota.injected_file_content_bytes = 10240 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.711585] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] quota.injected_file_path_length = 255 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.711739] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] quota.injected_files = 5 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.711906] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] quota.instances = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.712086] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] quota.key_pairs = 100 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.712254] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] quota.metadata_items = 128 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.712419] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] quota.ram = 51200 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.712582] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] quota.recheck_quota = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.712747] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] quota.server_group_members = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.712912] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] quota.server_groups = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.713103] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.713272] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.713432] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] scheduler.image_metadata_prefilter = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.713595] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.713791] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] scheduler.max_attempts = 3 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.713983] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] scheduler.max_placement_results = 1000 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.714165] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.714334] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] scheduler.query_placement_for_image_type_support = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.714497] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.714674] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] scheduler.workers = 2 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.714851] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.715030] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.715217] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.715386] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.715554] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.715718] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.715884] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.716087] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.716267] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.host_subset_size = 1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.716425] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.716588] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.716752] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.716921] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.isolated_hosts = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.717085] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.isolated_images = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.717245] env[62508]: DEBUG oslo_service.service [None 
req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.717411] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.717579] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.717762] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.pci_in_placement = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.717937] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.718113] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.718272] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.718426] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.718592] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.718747] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.718901] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.track_instance_changes = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.719094] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.719260] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] metrics.required = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.719416] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] metrics.weight_multiplier = 1.0 
{{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.719572] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.719770] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] metrics.weight_setting = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.720100] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.720281] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] serial_console.enabled = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.720463] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] serial_console.port_range = 10000:20000 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.720635] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.720820] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.721000] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] serial_console.serialproxy_port = 6083 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.721183] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] service_user.auth_section = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.721351] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] service_user.auth_type = password {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.721510] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] service_user.cafile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.721662] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] service_user.certfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.721824] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] service_user.collect_timing = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.721982] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] service_user.insecure = False {{(pid=62508) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.722146] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] service_user.keyfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.722317] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] service_user.send_service_user_token = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.722477] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] service_user.split_loggers = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.722629] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] service_user.timeout = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.722818] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] spice.agent_enabled = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.722978] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] spice.enabled = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.723297] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.723501] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.723667] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] spice.html5proxy_port = 6082 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.723826] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] spice.image_compression = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.723981] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] spice.jpeg_compression = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.724154] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] spice.playback_compression = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.724324] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] spice.require_secure = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.724480] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] spice.server_listen = 127.0.0.1 {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.724639] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.724794] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] spice.streaming_mode = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.724947] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] spice.zlib_compression = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.725128] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] upgrade_levels.baseapi = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.725305] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] upgrade_levels.compute = auto {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.725467] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] upgrade_levels.conductor = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.725627] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] upgrade_levels.scheduler = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.725794] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vendordata_dynamic_auth.auth_section = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.725960] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vendordata_dynamic_auth.auth_type = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.726134] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vendordata_dynamic_auth.cafile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.726296] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vendordata_dynamic_auth.certfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.726459] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.726621] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vendordata_dynamic_auth.insecure = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.726782] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vendordata_dynamic_auth.keyfile = None {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.726945] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.727118] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vendordata_dynamic_auth.timeout = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.727295] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.api_retry_count = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.727455] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.ca_file = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.727650] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.cache_prefix = devstack-image-cache {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.727859] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.cluster_name = testcl1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.728039] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.connection_pool_size = 10 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.728202] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.console_delay_seconds = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.728370] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.datastore_regex = ^datastore.* {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.728585] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.728759] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.host_password = **** {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.728930] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.host_port = 443 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.729109] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.host_username = administrator@vsphere.local {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.729280] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.insecure = True {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.729462] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.integration_bridge = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.729596] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.maximum_objects = 100 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.729790] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.pbm_default_policy = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.730018] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.pbm_enabled = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.730182] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.pbm_wsdl_location = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.730354] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.730513] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.serial_port_proxy_uri = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.730671] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.serial_port_service_uri = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.730837] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.task_poll_interval = 0.5 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.731023] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.use_linked_clone = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.731201] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.vnc_keymap = en-us {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.731368] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.vnc_port = 5900 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.731528] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vmware.vnc_port_total = 10000 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.731711] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vnc.auth_schemes = ['none'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.731888] 
env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vnc.enabled = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.732214] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.732399] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.732573] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vnc.novncproxy_port = 6080 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.732763] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vnc.server_listen = 127.0.0.1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.732946] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.733122] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vnc.vencrypt_ca_certs = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.733283] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vnc.vencrypt_client_cert = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.733444] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vnc.vencrypt_client_key = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.733622] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.733785] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.disable_deep_image_inspection = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.733945] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.734123] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.734289] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.734454] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.disable_rootwrap = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.734617] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.enable_numa_live_migration = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.734778] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.734984] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.735192] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.735360] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.libvirt_disable_apic = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.735523] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.735688] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.735853] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.736028] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.736198] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.736361] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.736526] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.736689] 
env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.736854] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.737028] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.737327] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.737401] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] wsgi.client_socket_timeout = 900 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.737561] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] wsgi.default_pool_size = 1000 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.737769] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] wsgi.keep_alive = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.737950] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] wsgi.max_header_line = 16384 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.738152] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] wsgi.secure_proxy_ssl_header = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.738327] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] wsgi.ssl_ca_file = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.738492] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] wsgi.ssl_cert_file = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.738652] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] wsgi.ssl_key_file = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.738822] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] wsgi.tcp_keepidle = 600 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.738998] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.739174] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] zvm.ca_file = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.739334] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] zvm.cloud_connector_url = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.739671] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.739853] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] zvm.reachable_timeout = 300 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.740054] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_policy.enforce_new_defaults = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.740455] env[62508]: WARNING oslo_config.cfg [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
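For reference, a minimal, hypothetical sketch (not Nova's actual startup code) of how an oslo.config-based service produces the "group.option = value" lines seen throughout this dump, via ConfigOpts.log_opt_values (the call named in every entry above). It assumes only that oslo.config is installed; the two [oslo_policy] options and the [vmware] password registered here are illustrative stand-ins for the real option set.

import logging

from oslo_config import cfg

CONF = cfg.CONF
# Illustrative options only; Nova registers hundreds of these across many groups.
CONF.register_opts(
    [cfg.BoolOpt('enforce_scope', default=True),
     cfg.BoolOpt('enforce_new_defaults', default=True)],
    group='oslo_policy')
CONF.register_opts(
    [cfg.StrOpt('host_password', default='changeme', secret=True)],  # secret=True -> shown as ****
    group='vmware')

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger('oslo_service.service')

CONF([], project='nova')                  # parse command line / config files (none supplied here)
CONF.log_opt_values(LOG, logging.DEBUG)   # emits e.g. "oslo_policy.enforce_scope = True"

Options registered with secret=True are masked by log_opt_values, which is why values such as vmware.host_password and oslo_messaging_notifications.transport_url appear as **** in the dump above.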
[ 490.740645] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_policy.enforce_scope = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.740845] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_policy.policy_default_rule = default {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.741050] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.741229] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_policy.policy_file = policy.yaml {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.741405] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.741568] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.741730] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.741892] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.742068] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.742239] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_policy.remote_timeout = 60.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.742411] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.742587] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.742761] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] profiler.connection_string = messaging:// {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.742930] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] profiler.enabled = False {{(pid=62508) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.743112] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] profiler.es_doc_type = notification {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.743275] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] profiler.es_scroll_size = 10000 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.743439] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] profiler.es_scroll_time = 2m {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.743601] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] profiler.filter_error_trace = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.743766] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] profiler.hmac_keys = **** {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.743931] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] profiler.sentinel_service_name = mymaster {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.744106] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] profiler.socket_timeout = 0.1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.744272] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] profiler.trace_requests = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.744434] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] profiler.trace_sqlalchemy = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.744615] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] profiler_jaeger.process_tags = {} {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.744775] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] profiler_jaeger.service_name_prefix = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.744935] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] profiler_otlp.service_name_prefix = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.745112] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] remote_debug.host = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.745274] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] remote_debug.port = None {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.745449] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.745611] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.745774] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.745936] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.746111] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.746273] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.746431] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.746592] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.746749] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.746918] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.747089] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.747261] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.747424] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.747627] 
env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.747822] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.747996] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.748174] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.748346] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.748512] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.748675] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.748837] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.749009] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.749181] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.749339] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.749503] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.749692] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.749866] env[62508]: DEBUG oslo_service.service [None 
req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.750042] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.750217] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.750386] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.ssl = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.750557] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.750731] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.750891] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.751083] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.751260] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.ssl_version = {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.751422] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.751608] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.751779] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_notifications.retry = -1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.751964] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.752156] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_messaging_notifications.transport_url = **** {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.752330] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.auth_section = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.752493] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.auth_type = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.752651] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.cafile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.752809] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.certfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.752974] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.collect_timing = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.753147] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.connect_retries = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.753307] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.connect_retry_delay = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.753463] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.endpoint_id = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.753631] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.endpoint_interface = publicURL {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.753792] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.endpoint_override = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.753947] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.endpoint_region_name = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.754115] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.endpoint_service_name = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.754268] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.endpoint_service_type = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.754431] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.insecure = False {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.754588] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.keyfile = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.754739] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.max_version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.754896] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.min_version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.755067] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.region_name = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.755227] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.retriable_status_codes = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.755387] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.service_name = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.755539] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.service_type = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.755704] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.split_loggers = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.755862] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.status_code_retries = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.756017] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.status_code_retry_delay = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.756179] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.timeout = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.756333] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.valid_interfaces = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.756487] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_limit.version = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.756651] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_reports.file_event_handler = None {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.756818] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.756974] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] oslo_reports.log_dir = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.757177] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.757312] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.757468] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.757665] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.757861] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.758017] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.758201] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.758371] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vif_plug_ovs_privileged.group = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.758524] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.758688] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.758832] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.758991] env[62508]: DEBUG 
oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] vif_plug_ovs_privileged.user = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.759177] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] os_vif_linux_bridge.flat_interface = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.759359] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.759532] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.759731] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.759915] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.760111] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.760271] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.760431] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.760614] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.760787] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] os_vif_ovs.isolate_vif = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.760958] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.761143] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.761326] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.761483] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] os_vif_ovs.ovsdb_interface = native {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.761653] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] os_vif_ovs.per_port_bridge = False {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.761826] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] privsep_osbrick.capabilities = [21] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.761988] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] privsep_osbrick.group = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.762162] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] privsep_osbrick.helper_command = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.762329] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.762499] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.762653] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] privsep_osbrick.user = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.762829] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.762989] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] nova_sys_admin.group = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.763162] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] nova_sys_admin.helper_command = None {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.763327] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.763491] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.763648] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] nova_sys_admin.user = None {{(pid=62508) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 490.763853] env[62508]: DEBUG oslo_service.service [None req-f132ed4d-a3c6-443a-8f44-9e1fdc88be35 None None] ******************************************************************************** {{(pid=62508) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 490.764374] env[62508]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 491.267858] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Getting list of instances from cluster (obj){ [ 491.267858] env[62508]: value = "domain-c8" [ 491.267858] env[62508]: _type = "ClusterComputeResource" [ 491.267858] env[62508]: } {{(pid=62508) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 491.269045] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6f041b-52f1-4b77-94ee-75c89080752f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 491.278399] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Got total of 0 instances {{(pid=62508) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 491.278970] env[62508]: WARNING nova.virt.vmwareapi.driver [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 491.279487] env[62508]: INFO nova.virt.node [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Generated node identity 5d5b4923-a8ac-4688-9f86-2405bd3406a9 [ 491.279788] env[62508]: INFO nova.virt.node [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Wrote node identity 5d5b4923-a8ac-4688-9f86-2405bd3406a9 to /opt/stack/data/n-cpu-1/compute_id [ 491.783031] env[62508]: WARNING nova.compute.manager [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Compute nodes ['5d5b4923-a8ac-4688-9f86-2405bd3406a9'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 492.788723] env[62508]: INFO nova.compute.manager [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 493.794300] env[62508]: WARNING nova.compute.manager [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
[ 493.794588] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 493.794812] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 493.794964] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 493.795134] env[62508]: DEBUG nova.compute.resource_tracker [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 493.797482] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36104e39-f2fe-4d60-8b57-8f42c5c1c36a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 493.805552] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d0ee29e-8cc5-4d95-b5d5-763084562bfd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 493.818823] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b08cee2-d844-4c22-ae16-75e80e826f9d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 493.825118] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e204e9de-85dc-46cf-be99-3edf21e06f82 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 493.852823] env[62508]: DEBUG nova.compute.resource_tracker [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181333MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 493.852982] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 493.853172] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 494.355277] env[62508]: WARNING 
nova.compute.resource_tracker [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] No compute node record for cpu-1:5d5b4923-a8ac-4688-9f86-2405bd3406a9: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 5d5b4923-a8ac-4688-9f86-2405bd3406a9 could not be found. [ 494.859584] env[62508]: INFO nova.compute.resource_tracker [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 [ 496.368018] env[62508]: DEBUG nova.compute.resource_tracker [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 496.368413] env[62508]: DEBUG nova.compute.resource_tracker [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 496.535319] env[62508]: INFO nova.scheduler.client.report [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] [req-b8019e15-bfc8-4b63-99c3-880e1df31887] Created resource provider record via placement API for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 496.552272] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd98910a-0cbc-4e3c-8576-edb432f0a42c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 496.560688] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f470aaf1-d049-4cc1-bb2e-dca073850a0a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 496.590611] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bac4131-bf59-4f74-ba78-199b6781ab0d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 496.597687] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce8d19d-eae9-49aa-b7ee-df71577d3ed3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 496.610355] env[62508]: DEBUG nova.compute.provider_tree [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 497.144354] env[62508]: DEBUG nova.scheduler.client.report [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 497.144575] env[62508]: DEBUG nova.compute.provider_tree [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 0 to 1 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 497.144716] env[62508]: DEBUG nova.compute.provider_tree [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 497.197266] env[62508]: DEBUG nova.compute.provider_tree [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 1 to 2 during operation: update_traits {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 497.701889] env[62508]: DEBUG nova.compute.resource_tracker [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 497.702257] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.849s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 497.702308] env[62508]: DEBUG nova.service [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Creating RPC server for service compute {{(pid=62508) start /opt/stack/nova/nova/service.py:186}} [ 497.715956] env[62508]: DEBUG nova.service [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] Join ServiceGroup membership for this service compute {{(pid=62508) start /opt/stack/nova/nova/service.py:203}} [ 497.716159] env[62508]: DEBUG nova.servicegroup.drivers.db [None req-1eff1c21-2ba2-48a3-acaa-0e010100201d None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62508) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 522.719714] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.223701] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Getting list of instances from cluster (obj){ [ 523.223701] env[62508]: value = 
"domain-c8" [ 523.223701] env[62508]: _type = "ClusterComputeResource" [ 523.223701] env[62508]: } {{(pid=62508) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 523.224929] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bdd2ca7-dabe-4563-aaa0-7c44ac169ab5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.233779] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Got total of 0 instances {{(pid=62508) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 523.234018] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.234291] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Getting list of instances from cluster (obj){ [ 523.234291] env[62508]: value = "domain-c8" [ 523.234291] env[62508]: _type = "ClusterComputeResource" [ 523.234291] env[62508]: } {{(pid=62508) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 523.235162] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c56e921-3554-4d0c-94ed-c0fa93be0f4c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.242276] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Got total of 0 instances {{(pid=62508) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 546.003309] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 546.003309] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 546.003309] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 546.003309] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Rebuilding the list of instances to heal {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 546.504992] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Didn't find any instances for network info cache update. 
{{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 546.505289] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 546.505496] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 546.505708] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 546.505900] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 546.506097] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 546.506281] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 546.506440] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 546.506583] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 547.009373] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.009806] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.009806] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 547.010022] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 547.010847] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd22a554-4699-458c-8e45-3c8de0f69bd7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.019045] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44676bb7-0c49-42c9-b3a6-e16b31f2f3ff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.032642] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d5403b3-977f-40a7-b82c-de41c118022b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.038902] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1d480e-b362-4f9f-8d07-53141bd8c379 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.067767] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181322MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 547.067904] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.068103] 
env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.085322] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 548.085619] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 548.103691] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d510b5-c72c-46d5-a93d-a39cf680019d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.115311] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc53741c-2b28-464d-babf-1abe2f43e384 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.144332] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff0737c-bc3f-4cab-8231-47fadce95e84 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.151172] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c63770cb-1093-4d28-b9cb-770263615aed {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.163998] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 548.666833] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 549.172408] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 549.172803] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.104s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 609.160348] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 609.160755] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 609.665338] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 609.665516] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 609.665632] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Rebuilding the list of instances to heal {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 610.174836] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Didn't find any instances for network info cache update. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 610.175167] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 610.175264] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 610.175368] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 610.175524] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 610.175662] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 610.175803] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 610.175928] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 610.176083] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 610.679675] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.679951] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.680169] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.680261] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 610.681255] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a008b3-00ff-4008-a980-e89f97b1eb8a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.689440] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc20ef5b-4f70-481c-a0dd-ddf80be0120a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.702901] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d7548f7-b502-4fac-9578-94d2abeccb9a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.708771] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc6326c-f200-4300-bb8b-fa8cf7655e57 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.737143] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181326MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 610.737277] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 
None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.737452] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.757157] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 611.757441] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 611.770538] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e1a4180-aca2-4a6a-8eaf-436d9d605f3d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.778369] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e635689f-4af0-4b34-ba91-506e532a9551 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.807398] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ad238e-f49e-42c5-81b8-9b56c796f36f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.814130] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e7465b-1206-4b1b-9d07-150cc0a046f3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.826715] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 612.330129] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 612.331479] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 612.331664] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.333058] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.333058] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.333058] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 672.333498] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Rebuilding the list of instances to heal {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 672.836506] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Didn't find any instances for network info cache update. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 672.836755] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.836892] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.837052] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.837207] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.837348] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.837496] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.837628] env[62508]: DEBUG 
nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 672.837767] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 673.340894] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.341261] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.341327] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.341470] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 673.342333] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42d7e807-8e14-40e5-9433-2af5aacc2f61 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.350803] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e84126-05fa-4716-a9cb-8e8ecb161282 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.364586] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73dfc81b-2e29-40f4-85af-7db619274b5f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.370695] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f4c52d-7ae7-4807-bd86-cf7c1489fef5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.400422] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181387MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 673.400598] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.400752] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.418800] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 674.419158] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 674.431451] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556132a0-d9a5-4752-8dce-bf58675c521b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.439120] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed82bab-b6d2-405c-95ba-cd5112882e61 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.467795] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b9ad7d-425f-4ec8-bb6d-536771640722 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.474467] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d387c6-5527-40eb-941c-4ba921f4c2c1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.486774] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 674.989369] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 674.990639] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 
674.990817] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.590s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.647271] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.151573] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.151769] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 729.151860] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Rebuilding the list of instances to heal {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 729.657103] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Didn't find any instances for network info cache update. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 729.657465] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.657465] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.657539] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.160439] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.160698] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.160869] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.161034] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 730.161957] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-124c3f57-8190-4799-a922-17dc8880fbae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.170284] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f99cc51-268b-4b52-a4f6-c491556d5094 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.184258] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a599c1d0-5ba2-4f00-8233-70a22f1524ca {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.190196] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3f0562-a6c4-4910-8180-f0abda3f8ebf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.218021] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181384MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 730.218176] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.218362] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.240218] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 731.240487] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 731.253630] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb817b28-8d30-404b-a28f-7414a3b8ec1e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.262014] env[62508]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-149d784d-8f5a-408c-86c9-3f57d532ce69 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.291459] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593233aa-711d-4e6a-b037-0e15081ed0f5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.298993] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e696d976-e472-4601-898b-3ed0e4bdfb0e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.311821] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.814694] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 731.815964] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 731.816170] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.598s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.152715] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.152954] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.153141] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.153299] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.153449] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.153585] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 785.993308] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 785.993752] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Cleaning up deleted instances {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 786.497606] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] There are 0 instances to clean {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 786.497864] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 786.498016] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Cleaning up deleted instances with incomplete migration {{(pid=62508) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 787.001275] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 789.504142] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 789.504526] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 789.504526] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Rebuilding the list of instances to heal {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 790.007826] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Didn't find any instances for network info cache update. 
{{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 790.008168] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 790.008357] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 790.008509] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 790.008660] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 790.511777] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.512147] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.512147] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.512147] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 790.514091] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4075535-e40e-4e97-b270-053c8fe21313 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.521746] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6767f5b9-7397-4873-b4d4-146552edc801 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.535365] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-758df905-fc82-4296-aa88-6aeec9f81dff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.541489] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d20a8d75-7069-4ec5-a01b-0e7a71bb20cf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.569702] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181397MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 790.570039] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.570372] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.590037] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 791.590037] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 791.602050] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f1630a-a350-415d-82e3-de5175570235 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.609520] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48de2b2b-6db5-40f0-bbe8-9e43da1ba91f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.638305] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d41581f-c693-4a6b-8a29-3fc931c06d20 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.644765] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2295e87-e61c-4c98-b7b4-57bcd99f9ce4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.657849] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.161186] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 792.162474] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 792.162658] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.148112] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 793.148357] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 793.148559] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 793.148749] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 793.148894] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 847.993860] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 848.497677] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.497945] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.498088] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.498243] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 848.499144] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5c1cff-3bec-470e-b38f-af597d11ccd3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.507305] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4401e795-8673-48ef-941d-97c7c64f65a8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.521764] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5940f3a3-1f37-46ff-aa6f-bc962cb0b751 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.528101] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253d6331-e3fa-4cd8-a5e1-88458f5385f7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.556744] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181391MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 848.556918] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.557071] 
env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.589969] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 849.590259] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 849.607574] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 849.622207] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 849.622406] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 849.633571] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 849.648645] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 849.659317] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-281c69c9-b880-466b-abd8-9c9cf02cfd76 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.666558] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ba33a1-f678-4f4d-91db-a88600c16fcc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.697261] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685758ef-c1a4-467a-acdc-46fae14dedd5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.704350] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd8bb26-a2d7-4d6a-ae6f-dc8c5e9bab65 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.716940] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.219574] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 850.220822] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 850.221012] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.664s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.215944] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 851.720156] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 851.720340] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 851.720508] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Rebuilding the 
list of instances to heal {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 852.224102] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Didn't find any instances for network info cache update. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 852.224102] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 852.224102] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 852.224102] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 852.224102] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 852.224102] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 852.224639] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 852.994825] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 854.989566] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 908.993676] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 909.497272] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.497517] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.497707] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.497822] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 909.498731] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-252072ac-b079-4844-907c-31ea8b993023 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.506569] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8466c41-4974-46f5-8eb0-2c8d20c810a2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.520592] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-383a234f-49cb-48ec-8100-2a52c14df81f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.526423] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2240e29-d208-4212-ac20-66b783b89692 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.554204] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181392MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 909.554354] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.554520] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.574211] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 910.575021] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 910.587896] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f38b18-18e7-4c3b-bb18-c7a4fddf775b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.595565] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5089696-9e45-4eb7-93e3-d1098f24c67e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.625354] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2fe3eef-7109-4960-83bc-46c32348fe5d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.632550] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13943680-7de2-4c8c-ac8e-0918ea1148f6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.645137] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 911.148372] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 911.149655] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 911.149833] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.595s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.149924] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 913.150352] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 913.150352] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Rebuilding the list of instances to heal {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 913.653248] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Didn't find any instances for network info cache update. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 913.653248] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 913.653248] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 913.653248] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 913.653613] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 913.653613] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 913.653613] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 914.993925] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 916.990139] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 968.994660] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 969.497975] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.498841] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.498841] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.498841] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 969.499525] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3c6cd7-559c-4dfd-acaa-490fa18aba36 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.507527] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0d9cd6-86e9-44ac-90f0-cacb5282b684 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.521169] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-933799cb-2aee-4a5a-8369-7e6c20d2ab2c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.527044] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63dc4e5-6d72-4374-afc8-99e8f8cd65ce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.555846] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181396MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 969.555986] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.556185] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.575024] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 970.575314] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 970.587328] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b7cdff-036c-418d-8221-185acd8e5d8f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.594761] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e280f28b-abae-4bea-b47c-1a99de2bbd60 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.623813] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74aa6ccc-e259-4000-b405-6e820c7e23e9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.630449] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e4f6793-91d2-4df9-836c-eea1ee68d34c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.644760] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.148396] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 971.149653] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 971.149859] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.149631] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.656317] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.656317] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 973.656317] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Rebuilding the list of instances to heal {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 974.158722] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Didn't find any instances for network info cache update. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 974.159083] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.159222] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.159354] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.159512] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.159644] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 974.994546] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.994083] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 978.990359] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1028.994580] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1029.498220] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.498484] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.498618] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.498772] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1029.499887] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ebfc94d-c66a-47d4-ad44-77bb33e7990f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.508193] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5feac36c-4db8-4fdd-8388-88aed7140c16 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.522373] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c665d67-aa50-4b8f-b8f2-0f2d356726b9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.528636] env[62508]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cffaa6a-0fa6-4ecd-b848-666533e028c2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.558636] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181392MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1029.558798] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.558952] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.583049] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1030.583049] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1030.595706] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1aa2abe-e1f8-443f-ab87-0c22cb2cf781 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.603406] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d7c1eb-5e77-4331-bce8-390d067c1aad {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.632227] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c448912d-d6f3-49ac-9d10-bcfc2400312c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.638790] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6427471e-f0a5-4ade-97de-3f0238b3f7e8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.651533] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.155213] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1031.156521] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1031.156710] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.598s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.155588] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.155985] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1033.155985] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Rebuilding the list of instances to heal {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1033.659181] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Didn't find any instances for network info cache update. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1033.659423] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.993323] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.993536] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.993677] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.993870] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1036.995135] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1038.994999] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.989099] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1088.994716] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1090.496685] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1091.000237] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.000461] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.000624] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.000774] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1091.001736] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85bdbae-a692-44d1-b77d-cd5df2cdd335 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.010024] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435d32ac-2e45-4aa1-a092-0c8b73707449 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.023821] env[62508]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c98311-9532-4bb2-91de-3e6452ba4670 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.030043] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1d4078-bed8-4be7-b529-a29f66084bfc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.059133] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181393MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1091.059283] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.059456] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.078391] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1092.078391] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1092.090179] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624b70df-1d68-4c48-93fa-035f5602e240 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.097283] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801d0c6b-9abd-41a5-b375-dbad9e6a90e9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.127010] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d98f4b81-07d5-45b4-9657-2d10f0ba8528 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.133689] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b6e795-d298-4729-a090-7f6cf5b8602a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.146289] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 
{{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1092.649858] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1092.651214] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1092.651396] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1092.651601] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.651740] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Cleaning up deleted instances with incomplete migration {{(pid=62508) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1094.647134] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.151933] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.152140] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1095.152269] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Rebuilding the list of instances to heal {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1095.656450] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Didn't find any instances for network info cache update. 
{{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1095.656824] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.656863] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.657050] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.657188] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1095.993719] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.993957] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.994139] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Cleaning up deleted instances {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1096.497371] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] There are 0 instances to clean {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1099.498918] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1100.994602] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1101.988569] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1122.722504] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1123.226417] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None 
req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Getting list of instances from cluster (obj){ [ 1123.226417] env[62508]: value = "domain-c8" [ 1123.226417] env[62508]: _type = "ClusterComputeResource" [ 1123.226417] env[62508]: } {{(pid=62508) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1123.227874] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-998999f6-be21-4a2c-a0b6-071a28a9ab98 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.236490] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Got total of 0 instances {{(pid=62508) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1151.993949] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1152.498045] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.498318] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.498448] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.498609] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1152.499534] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d68e9f-4cdd-4531-bfc2-7f8453b1bdda {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.507744] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6de4cf-dd5d-4b54-8bc5-25b00af92314 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.521543] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117d41d0-db04-4b82-98a4-a4ef016ea7fc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.527543] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc1ccdf-f995-41b7-9d60-5c67036afa1d {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.556470] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181386MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1152.556655] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.556788] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.674940] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1153.675226] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1153.690789] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1153.703326] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1153.703529] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1153.714087] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None 
None] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1153.733123] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1153.745782] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eec05a4-2844-460d-bd6a-27267056b85c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.753299] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae6be34-ddc4-4e7d-a515-92096e37200c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.782281] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9bd78b-1c6e-49f8-a04e-b1a4ffb000d7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.788916] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a40b59-e8a0-4e19-bf06-33932d8f20c4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.802327] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1154.305396] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1154.306641] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1154.306832] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.750s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.308771] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache 
{{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1155.308771] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1155.308771] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Rebuilding the list of instances to heal {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1155.814425] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Didn't find any instances for network info cache update. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1155.814425] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1155.814645] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1155.814788] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1155.814853] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1156.994684] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1159.994610] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1161.989623] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1161.993631] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.988633] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.495863] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.495863] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1213.495863] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Rebuilding the list of instances to heal {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1214.001802] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Didn't find any instances for network info cache update. 
{{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1214.002207] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1214.505021] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1214.505270] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1214.505441] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1214.505598] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1214.506503] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22767fc5-b48f-44e7-822a-f9217c33785c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.514764] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5969335-38f9-4734-a97d-cdd812b2a0a4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.528100] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65f0927-ee5d-44d5-8d3d-17d1b3981d81 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.533942] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42445910-cd62-4c4b-8666-3ea08b0f02c0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.562493] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181392MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1214.562644] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1214.562816] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.581481] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1215.581757] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1215.594332] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23bf3552-3018-47c8-b360-1f3e8cb039ef {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.602606] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c27ab7f3-2b49-4970-92ae-41106abaecc9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.632043] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a5608bb-dc38-481e-8a24-4022f74c37f3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.638864] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3569b477-cbd1-48c1-83c6-fe6009a3c69f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.651383] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1216.155012] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1216.156311] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1216.156498] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1217.148588] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1217.148974] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1217.149047] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1217.149224] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1217.149362] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1220.994608] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1221.994478] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1223.989125] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1269.490466] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquiring lock "ee99ff4d-9996-4cfa-b038-7b19aef27438" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1269.490785] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "ee99ff4d-9996-4cfa-b038-7b19aef27438" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1269.994455] env[62508]: DEBUG 
nova.compute.manager [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1270.370800] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquiring lock "fb7519c4-0254-4831-81f3-0eed14844f2d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1270.371153] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Lock "fb7519c4-0254-4831-81f3-0eed14844f2d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1270.547985] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1270.550018] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1270.550763] env[62508]: INFO nova.compute.claims [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1270.873211] env[62508]: DEBUG nova.compute.manager [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1271.277306] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Acquiring lock "96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.277541] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Lock "96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1271.397394] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.556237] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Acquiring lock "b182d3aa-a4de-4879-ab36-2cb51472158a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.556237] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Lock "b182d3aa-a4de-4879-ab36-2cb51472158a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1271.672256] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9929c4de-ef3b-48ff-b3a3-b9039eb61527 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.682622] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd54b8f-13df-4cf2-a6aa-3956d313e8b3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.715873] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53dd739-a7e3-4131-bfad-3f5acbc61517 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.724889] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fb4ffee-4cb4-4981-a100-2bdb5df9d602 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.741640] env[62508]: DEBUG nova.compute.provider_tree [None 
req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1271.781023] env[62508]: DEBUG nova.compute.manager [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1271.799893] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Acquiring lock "827b0887-2132-49af-bcce-cedc7237245d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.799893] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Lock "827b0887-2132-49af-bcce-cedc7237245d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.060739] env[62508]: DEBUG nova.compute.manager [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1272.246088] env[62508]: DEBUG nova.scheduler.client.report [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1272.303757] env[62508]: DEBUG nova.compute.manager [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1272.318245] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.599125] env[62508]: DEBUG oslo_concurrency.lockutils [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "868cf942-f348-488d-b00a-af4c8b5efda5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.599632] env[62508]: DEBUG oslo_concurrency.lockutils [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "868cf942-f348-488d-b00a-af4c8b5efda5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.612722] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.751555] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.203s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1272.752160] env[62508]: DEBUG nova.compute.manager [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1272.756343] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.359s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.758725] env[62508]: INFO nova.compute.claims [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1272.764352] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Acquiring lock "d32a3a5d-17d0-4a79-b76a-371cdd170ee0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.764549] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Lock "d32a3a5d-17d0-4a79-b76a-371cdd170ee0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.834691] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1273.103802] env[62508]: DEBUG nova.compute.manager [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1273.268495] env[62508]: DEBUG nova.compute.utils [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1273.270244] env[62508]: DEBUG nova.compute.manager [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Allocating IP information in the background. 
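[editor's note] The repeated "Acquiring lock ... by ... / acquired ... waited Ns / released ... held Ns" records throughout this stretch are the standard oslo.concurrency debug output around a named lock. A minimal sketch of that pattern, assuming only the stock lockutils decorator; the function and its body are invented for illustration and are not Nova's actual resource-tracker code:

```python
from oslo_concurrency import lockutils

# Rough illustration of the pattern behind the "compute_resources" lock records:
# the decorated body runs serialized under the named lock, and the decorator's
# wrapper is what logs the waited/held durations seen above.
@lockutils.synchronized("compute_resources")
def instance_claim_sketch(tracker, instance):
    # Placeholder for claiming CPU/RAM/disk against the tracked compute node.
    pass
```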
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1273.270734] env[62508]: DEBUG nova.network.neutron [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1273.276039] env[62508]: DEBUG nova.compute.manager [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1273.630321] env[62508]: DEBUG oslo_concurrency.lockutils [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1273.782189] env[62508]: DEBUG nova.compute.manager [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1273.810735] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1273.964733] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96457f4f-ff71-4213-acd0-541c808e135c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.973246] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e8372a-bff8-4a65-9e12-9410cfa70ee4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.008318] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.008318] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1274.008318] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Rebuilding the list of instances to heal {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1274.023024] env[62508]: DEBUG nova.policy [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 
tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b84719d9c5842a5a144d91691b5fc12', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d188021b90b4a7cb04521e090d0c1c8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1274.025511] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e948831c-f8ad-4e92-9b37-8a290b11cfbf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.031933] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Skipping network cache update for instance because it is Building. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1274.031933] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Skipping network cache update for instance because it is Building. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1274.031933] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Didn't find any instances for network info cache update. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1274.038247] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25666e0-399b-425f-a0ae-2f3cd2c25c2a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.054676] env[62508]: DEBUG nova.compute.provider_tree [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1274.560310] env[62508]: DEBUG nova.scheduler.client.report [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1274.804634] env[62508]: DEBUG nova.compute.manager [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1274.842175] env[62508]: DEBUG nova.virt.hardware [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1274.842175] env[62508]: DEBUG nova.virt.hardware [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1274.842175] env[62508]: DEBUG nova.virt.hardware [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1274.842378] env[62508]: DEBUG nova.virt.hardware [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1274.842378] env[62508]: DEBUG nova.virt.hardware [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1274.842378] env[62508]: DEBUG nova.virt.hardware [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1274.842378] env[62508]: DEBUG nova.virt.hardware [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1274.842378] env[62508]: DEBUG nova.virt.hardware [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1274.842933] env[62508]: DEBUG nova.virt.hardware [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1274.843257] env[62508]: DEBUG nova.virt.hardware [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1274.843598] env[62508]: DEBUG nova.virt.hardware [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1274.847225] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d84df2-ee17-4cfd-8a38-061e26d3f6e6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.860734] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c0c32e-baac-40aa-9b97-fbc35cf36592 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.894631] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc3207d-9fc5-48d8-8ba9-53fd59f5ee9a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.995578] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1275.068049] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.311s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1275.068592] env[62508]: DEBUG nova.compute.manager [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Start building networks asynchronously for instance. 
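[editor's note] The nova.virt.hardware trace above walks from flavor/image limits (all unset, so the 65536 sockets/cores/threads defaults) down to a single possible topology for one vCPU. A toy re-derivation of that enumeration, purely illustrative and not the hardware.py implementation:

```python
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
    divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
    return [(s, c, t)
            for s, c, t in product(divisors, repeat=3)
            if s * c * t == vcpus
            and s <= max_sockets and c <= max_cores and t <= max_threads]

print(possible_topologies(1))  # [(1, 1, 1)], matching the single topology logged above
```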
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1275.077636] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.757s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1275.077636] env[62508]: INFO nova.compute.claims [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1275.471092] env[62508]: DEBUG nova.network.neutron [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Successfully created port: 987ac3c1-9f91-4672-9ca9-339fd8ad1dfd {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1275.498255] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1275.583544] env[62508]: DEBUG nova.compute.utils [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1275.587399] env[62508]: DEBUG nova.compute.manager [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1275.587868] env[62508]: DEBUG nova.network.neutron [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1275.754266] env[62508]: DEBUG nova.policy [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f551d4352d914c58a846380ffe016437', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa9fc12f40cf45729330d52f46688f41', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1276.091167] env[62508]: DEBUG nova.compute.manager [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1276.243399] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b0788d9-d977-4905-b7a6-6150eedc9edc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.256651] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4feca714-2cdc-46b8-ae66-eb0135fdf881 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.306381] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ec9ffc-e0fe-47c0-817f-fc1c20a8a781 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.317926] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a072f0-724b-44e8-91a4-3dd151a93608 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.333908] env[62508]: DEBUG nova.compute.provider_tree [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1276.838467] env[62508]: DEBUG nova.scheduler.client.report [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1276.980503] env[62508]: DEBUG nova.network.neutron [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Successfully created port: 6f2165d0-7bca-4b3b-8351-aa81a2b3af95 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1277.101324] env[62508]: DEBUG nova.compute.manager [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1277.136502] env[62508]: DEBUG nova.virt.hardware [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1277.136771] env[62508]: DEBUG nova.virt.hardware [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1277.136930] env[62508]: DEBUG nova.virt.hardware [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1277.137205] env[62508]: DEBUG nova.virt.hardware [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1277.137703] env[62508]: DEBUG nova.virt.hardware [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1277.137837] env[62508]: DEBUG nova.virt.hardware [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 
tempest-DeleteServersAdminTestJSON-1477972896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1277.138061] env[62508]: DEBUG nova.virt.hardware [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1277.138229] env[62508]: DEBUG nova.virt.hardware [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1277.138396] env[62508]: DEBUG nova.virt.hardware [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1277.138559] env[62508]: DEBUG nova.virt.hardware [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1277.138726] env[62508]: DEBUG nova.virt.hardware [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1277.139805] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c7ff1c-d283-4781-82ba-f5c566658c38 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.148217] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d84025-1fb6-414a-adb7-4b99a17252bc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.345097] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.270s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.345779] env[62508]: DEBUG nova.compute.manager [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1277.352494] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.737s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.352494] env[62508]: INFO nova.compute.claims [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1277.854613] env[62508]: DEBUG nova.compute.utils [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1277.860189] env[62508]: DEBUG nova.compute.manager [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Not allocating networking since 'none' was specified. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1278.360162] env[62508]: DEBUG nova.compute.manager [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1278.552775] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-428b2d98-0161-455d-bd22-5f0a0c60b061 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.566476] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4a07c6-205e-4434-9278-7180d024a8eb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.604017] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3aaef9-2059-423c-94d4-42f41451c8ba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.612982] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14aa5eb3-82b5-42f7-a56a-1e093e231a6b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.633646] env[62508]: DEBUG nova.compute.provider_tree [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1278.696881] env[62508]: DEBUG nova.network.neutron [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Successfully updated port: 987ac3c1-9f91-4672-9ca9-339fd8ad1dfd {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1278.848995] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Acquiring lock "891fac54-2ec4-4d47-8535-a33bd9dfb804" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1278.851373] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Lock "891fac54-2ec4-4d47-8535-a33bd9dfb804" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1279.151884] env[62508]: DEBUG nova.scheduler.client.report [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1279.208165] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquiring lock "refresh_cache-ee99ff4d-9996-4cfa-b038-7b19aef27438" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1279.209086] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquired lock "refresh_cache-ee99ff4d-9996-4cfa-b038-7b19aef27438" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1279.210555] env[62508]: DEBUG nova.network.neutron [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1279.354056] env[62508]: DEBUG nova.compute.manager [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1279.377324] env[62508]: DEBUG nova.compute.manager [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1279.411315] env[62508]: DEBUG nova.virt.hardware [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1279.413402] env[62508]: DEBUG nova.virt.hardware [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1279.413402] env[62508]: DEBUG nova.virt.hardware [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1279.413402] env[62508]: DEBUG nova.virt.hardware [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1279.413402] env[62508]: DEBUG nova.virt.hardware [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1279.413402] env[62508]: DEBUG nova.virt.hardware [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1279.413402] env[62508]: DEBUG nova.virt.hardware [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1279.414285] env[62508]: DEBUG nova.virt.hardware [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1279.416597] env[62508]: DEBUG nova.virt.hardware [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf 
tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1279.416597] env[62508]: DEBUG nova.virt.hardware [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1279.416597] env[62508]: DEBUG nova.virt.hardware [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1279.417489] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac7f094-36f1-4b64-8580-561739596447 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.426143] env[62508]: DEBUG nova.network.neutron [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Successfully updated port: 6f2165d0-7bca-4b3b-8351-aa81a2b3af95 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1279.439139] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed57b65-df1b-46f4-831e-1c4dab3172db {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.460215] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Instance VIF info [] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1279.474654] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1279.475293] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb41cbe2-dc05-4a59-9d9d-5930b2354195 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.495175] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Created folder: OpenStack in parent group-v4. [ 1279.495175] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Creating folder: Project (a925c3fb67004e90a3427b5419b33d72). Parent ref: group-v368536. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1279.495362] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e83b668c-f7e1-4717-bf3b-2d48dd4ab9f3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.505768] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Created folder: Project (a925c3fb67004e90a3427b5419b33d72) in parent group-v368536. [ 1279.505921] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Creating folder: Instances. Parent ref: group-v368537. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1279.506624] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1e3e794-2f46-457d-abee-67b37641f1fc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.515676] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Created folder: Instances in parent group-v368537. [ 1279.516023] env[62508]: DEBUG oslo.service.loopingcall [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1279.516796] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1279.516796] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-60cbe5f7-c6ce-4cc3-9406-9c7635c49603 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.537794] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1279.537794] env[62508]: value = "task-1775262" [ 1279.537794] env[62508]: _type = "Task" [ 1279.537794] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.548944] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775262, 'name': CreateVM_Task} progress is 0%. 
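[editor's note] Records like "Invoking Folder.CreateVM_Task ...", "Waiting for the task: (returnval){ value = \"task-1775262\" ... }", and the later completion with duration_secs follow oslo.vmware's invoke-then-poll pattern. A hedged sketch of that flow, assuming an already constructed VMwareAPISession; the function and argument names here are illustrative rather than the exact vmops/vm_util call sites:

```python
from oslo_vmware import api as vmware_api

def create_vm_sketch(session: "vmware_api.VMwareAPISession",
                     folder_ref, config_spec, respool_ref):
    # CreateVM_Task returns a vSphere task reference immediately; the
    # "Waiting for the task" / "progress is 0%" DEBUG lines come from the
    # poll loop inside wait_for_task(), which returns once vCenter reports
    # success or raises on error.
    task_ref = session.invoke_api(session.vim, "CreateVM_Task", folder_ref,
                                  config=config_spec, pool=respool_ref)
    return session.wait_for_task(task_ref)
```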
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.662185] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.313s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1279.662394] env[62508]: DEBUG nova.compute.manager [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1279.666248] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.832s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1279.677106] env[62508]: INFO nova.compute.claims [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1279.684153] env[62508]: DEBUG nova.compute.manager [req-f4231d64-7ba4-4986-a52f-da371a9a2c4b req-5cf07fe3-53a2-4af2-8e5b-143bb3e32eef service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Received event network-vif-plugged-987ac3c1-9f91-4672-9ca9-339fd8ad1dfd {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1279.684153] env[62508]: DEBUG oslo_concurrency.lockutils [req-f4231d64-7ba4-4986-a52f-da371a9a2c4b req-5cf07fe3-53a2-4af2-8e5b-143bb3e32eef service nova] Acquiring lock "ee99ff4d-9996-4cfa-b038-7b19aef27438-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1279.684153] env[62508]: DEBUG oslo_concurrency.lockutils [req-f4231d64-7ba4-4986-a52f-da371a9a2c4b req-5cf07fe3-53a2-4af2-8e5b-143bb3e32eef service nova] Lock "ee99ff4d-9996-4cfa-b038-7b19aef27438-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1279.684153] env[62508]: DEBUG oslo_concurrency.lockutils [req-f4231d64-7ba4-4986-a52f-da371a9a2c4b req-5cf07fe3-53a2-4af2-8e5b-143bb3e32eef service nova] Lock "ee99ff4d-9996-4cfa-b038-7b19aef27438-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1279.684153] env[62508]: DEBUG nova.compute.manager [req-f4231d64-7ba4-4986-a52f-da371a9a2c4b req-5cf07fe3-53a2-4af2-8e5b-143bb3e32eef service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] No waiting events found dispatching 
network-vif-plugged-987ac3c1-9f91-4672-9ca9-339fd8ad1dfd {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1279.684440] env[62508]: WARNING nova.compute.manager [req-f4231d64-7ba4-4986-a52f-da371a9a2c4b req-5cf07fe3-53a2-4af2-8e5b-143bb3e32eef service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Received unexpected event network-vif-plugged-987ac3c1-9f91-4672-9ca9-339fd8ad1dfd for instance with vm_state building and task_state spawning. [ 1279.798510] env[62508]: DEBUG nova.network.neutron [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1279.889871] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1279.929857] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquiring lock "refresh_cache-fb7519c4-0254-4831-81f3-0eed14844f2d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1279.930117] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquired lock "refresh_cache-fb7519c4-0254-4831-81f3-0eed14844f2d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1279.930359] env[62508]: DEBUG nova.network.neutron [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1280.053275] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775262, 'name': CreateVM_Task, 'duration_secs': 0.360525} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.054146] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1280.057583] env[62508]: DEBUG oslo_vmware.service [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b163a8-03ab-4c68-8f54-7bf29e71e92d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.063485] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1280.063485] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.063842] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1280.064198] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52ecb30c-30d3-416f-8f09-4f3e5f52e36f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.070079] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1280.070079] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52fc954f-b2f4-7b6b-7638-84d220513b7a" [ 1280.070079] env[62508]: _type = "Task" [ 1280.070079] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.079319] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Acquiring lock "7339c22a-05c9-4ddd-93df-0326cbe96ca4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1280.079568] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Lock "7339c22a-05c9-4ddd-93df-0326cbe96ca4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.086842] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1280.087083] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1280.087324] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1280.087460] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.087874] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1280.088122] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-37b2a3ea-5442-4620-9dbd-ef4cca922fde {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.107436] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Created 
directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1280.107436] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1280.108338] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ec4f03-322a-46fd-a2df-e5f2c94cfe2f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.117778] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b62f5650-fb3e-492d-9101-8468c113cc25 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.123171] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1280.123171] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52521dec-f63e-4ce4-a254-ac4090f45f79" [ 1280.123171] env[62508]: _type = "Task" [ 1280.123171] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.133088] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52521dec-f63e-4ce4-a254-ac4090f45f79, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.152646] env[62508]: DEBUG nova.network.neutron [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Updating instance_info_cache with network_info: [{"id": "987ac3c1-9f91-4672-9ca9-339fd8ad1dfd", "address": "fa:16:3e:0d:65:46", "network": {"id": "93471a4c-8043-41bc-9a6c-49116ad50d1a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2104534686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d188021b90b4a7cb04521e090d0c1c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap987ac3c1-9f", "ovs_interfaceid": "987ac3c1-9f91-4672-9ca9-339fd8ad1dfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.179275] env[62508]: DEBUG nova.compute.utils [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1280.184486] env[62508]: DEBUG nova.compute.manager [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1280.184612] env[62508]: DEBUG nova.network.neutron [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1280.246242] env[62508]: DEBUG nova.policy [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '645e5dfe78b5406897ac6bb33ea0bc0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81e9cc83a10a439da4100e4e5725370f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1280.395138] env[62508]: DEBUG nova.compute.manager [req-d40f4f44-6a71-4d2d-81e5-17f067720c74 req-0eb225bf-d6fe-4a89-ae38-cc4455b9a567 service nova] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Received event network-vif-plugged-6f2165d0-7bca-4b3b-8351-aa81a2b3af95 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1280.395138] env[62508]: DEBUG oslo_concurrency.lockutils [req-d40f4f44-6a71-4d2d-81e5-17f067720c74 req-0eb225bf-d6fe-4a89-ae38-cc4455b9a567 service nova] Acquiring lock "fb7519c4-0254-4831-81f3-0eed14844f2d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1280.395138] env[62508]: DEBUG oslo_concurrency.lockutils [req-d40f4f44-6a71-4d2d-81e5-17f067720c74 req-0eb225bf-d6fe-4a89-ae38-cc4455b9a567 service nova] Lock "fb7519c4-0254-4831-81f3-0eed14844f2d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.395138] env[62508]: DEBUG oslo_concurrency.lockutils [req-d40f4f44-6a71-4d2d-81e5-17f067720c74 req-0eb225bf-d6fe-4a89-ae38-cc4455b9a567 service nova] Lock "fb7519c4-0254-4831-81f3-0eed14844f2d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1280.395138] env[62508]: DEBUG nova.compute.manager [req-d40f4f44-6a71-4d2d-81e5-17f067720c74 req-0eb225bf-d6fe-4a89-ae38-cc4455b9a567 service nova] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] No waiting events found dispatching network-vif-plugged-6f2165d0-7bca-4b3b-8351-aa81a2b3af95 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1280.395521] env[62508]: WARNING nova.compute.manager [req-d40f4f44-6a71-4d2d-81e5-17f067720c74 req-0eb225bf-d6fe-4a89-ae38-cc4455b9a567 service nova] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Received unexpected event network-vif-plugged-6f2165d0-7bca-4b3b-8351-aa81a2b3af95 for instance with vm_state building and task_state spawning. 
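The entries above show the compute manager receiving Neutron external events (network-vif-plugged-...) while the target instances are still building: a per-instance "<uuid>-events" lock is taken, no registered waiter is found, and the event is logged as unexpected. As a rough illustration of that dispatch pattern only (a minimal sketch, not Nova's actual implementation; InstanceEventsSketch and handle_external_event are invented names for the example):

```python
# Minimal sketch (not Nova's actual code) of the event-dispatch pattern visible
# above: external "network-vif-plugged" events are matched against waiters that
# were registered per instance, under a per-instance "<uuid>-events" lock.
import threading

from oslo_concurrency import lockutils


class InstanceEventsSketch:
    """Stand-in for the per-instance event-waiter registry."""

    def __init__(self):
        # {instance_uuid: {event_name: threading.Event}}
        self._events = {}

    def register(self, instance_uuid, event_name):
        """Called by the spawn path before it starts waiting for the VIF."""
        waiter = threading.Event()
        with lockutils.lock(instance_uuid + '-events'):
            self._events.setdefault(instance_uuid, {})[event_name] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        """Called on the external-event path; returns None if nobody is waiting."""
        with lockutils.lock(instance_uuid + '-events'):
            return self._events.get(instance_uuid, {}).pop(event_name, None)


def handle_external_event(registry, instance_uuid, event_name):
    waiter = registry.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Corresponds to the WARNING above: the instance is still building,
        # so nothing has registered for this event yet.
        print('unexpected event %s for %s' % (event_name, instance_uuid))
    else:
        waiter.set()
```

In this trace the warning appears benign: the instances continue building, and network-changed events for the same ports (987ac3c1-... and 6f2165d0-...) are processed normally further down.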
[ 1280.581919] env[62508]: DEBUG nova.compute.manager [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1280.637598] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Preparing fetch location {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1280.638080] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Creating directory with path [datastore1] vmware_temp/88f52982-aa82-42c1-aea0-aa17c8dbeac8/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1280.638466] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7c1510f-0119-4cc7-9306-58cc3de2fcda {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.654520] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Releasing lock "refresh_cache-ee99ff4d-9996-4cfa-b038-7b19aef27438" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1280.655197] env[62508]: DEBUG nova.compute.manager [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Instance network_info: |[{"id": "987ac3c1-9f91-4672-9ca9-339fd8ad1dfd", "address": "fa:16:3e:0d:65:46", "network": {"id": "93471a4c-8043-41bc-9a6c-49116ad50d1a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2104534686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d188021b90b4a7cb04521e090d0c1c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap987ac3c1-9f", "ovs_interfaceid": "987ac3c1-9f91-4672-9ca9-339fd8ad1dfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1280.656279] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 
tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:65:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '99be9a5e-b3f9-4e6c-83d5-df11f817847d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '987ac3c1-9f91-4672-9ca9-339fd8ad1dfd', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1280.665211] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Creating folder: Project (5d188021b90b4a7cb04521e090d0c1c8). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1280.667838] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0a39ffe1-8f1a-424f-9ac0-a037bed3b439 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.670048] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Created directory with path [datastore1] vmware_temp/88f52982-aa82-42c1-aea0-aa17c8dbeac8/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1280.670410] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Fetch image to [datastore1] vmware_temp/88f52982-aa82-42c1-aea0-aa17c8dbeac8/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/tmp-sparse.vmdk {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1280.670735] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Downloading image file data f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 to [datastore1] vmware_temp/88f52982-aa82-42c1-aea0-aa17c8dbeac8/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62508) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1280.672856] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccff9903-e3ab-4e60-98fc-7673af6dd771 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.676284] env[62508]: DEBUG nova.network.neutron [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1280.688293] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Created folder: Project (5d188021b90b4a7cb04521e090d0c1c8) in parent group-v368536. 
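The CreateVM_Task, SearchDatastore_Task and CopyVirtualDisk_Task waits that recur through this section all follow the same poll-until-done cycle that produces the "Waiting for the task", "progress is N%" and "completed successfully" lines. The sketch below is a generic illustration of that cycle, not oslo.vmware's implementation; get_task_info is a placeholder for a caller-supplied vSphere TaskInfo lookup.

```python
# Generic sketch of the task-polling pattern behind the "Waiting for the task" /
# "progress is N%" / "completed successfully" lines in this trace.
import time


def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vSphere-style task until it finishes.

    get_task_info() is assumed to return an object exposing the TaskInfo
    fields used here: key, state ('queued'/'running'/'success'/'error'),
    progress, result and error.
    """
    while True:
        info = get_task_info()
        if info.state in ('queued', 'running'):
            print('Task %s progress is %s%%' % (info.key, info.progress or 0))
            time.sleep(poll_interval)
        elif info.state == 'success':
            return info.result
        else:
            # state == 'error': surface the fault instead of looping forever.
            raise RuntimeError(info.error)
```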
[ 1280.688437] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Creating folder: Instances. Parent ref: group-v368540. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1280.691020] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3495300f-a200-4630-afc2-bf559f4f9b1e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.697177] env[62508]: DEBUG nova.compute.manager [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1280.701377] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10860808-5799-4184-ba53-1ee3173d4159 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.709132] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Created folder: Instances in parent group-v368540. [ 1280.709222] env[62508]: DEBUG oslo.service.loopingcall [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1280.714150] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1280.714564] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f71ad97-346a-48b2-b886-8af99331cef7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.731681] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e72c2a0-beef-4491-956b-16cc2968b835 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.792789] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96862745-c072-4081-b627-347e23fe3805 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.796328] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1280.796328] env[62508]: value = "task-1775265" [ 1280.796328] env[62508]: _type = "Task" [ 1280.796328] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.801788] env[62508]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c37c6360-4650-4567-b7ff-6c0538972a53 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.807369] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775265, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.897720] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Downloading image file data f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 to the data store datastore1 {{(pid=62508) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1280.966529] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d48a9cac-a52d-42a7-8e93-506ac6959ade {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.975734] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1526b6-06de-45f8-98c5-553fcdf6619e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.016998] env[62508]: DEBUG oslo_vmware.rw_handles [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/88f52982-aa82-42c1-aea0-aa17c8dbeac8/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62508) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1281.019428] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e7c430-e3a9-41bd-a413-9ce15b6cbb23 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.090693] env[62508]: DEBUG nova.network.neutron [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Successfully created port: 6e5092d1-840e-43a2-9714-b0ee31c608f4 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1281.095111] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c03bbaec-181b-49b3-8345-6dce3d1de897 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.117515] env[62508]: DEBUG nova.compute.provider_tree [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1281.134647] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1281.311977] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775265, 'name': CreateVM_Task, 'duration_secs': 0.379316} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.313697] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1281.324255] env[62508]: DEBUG oslo_concurrency.lockutils [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Acquiring lock "fa00f4fe-3bb2-4e17-be22-8a1fda502f65" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1281.324505] env[62508]: DEBUG oslo_concurrency.lockutils [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Lock "fa00f4fe-3bb2-4e17-be22-8a1fda502f65" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1281.343244] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1281.343460] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.343811] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1281.344183] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b4277ab-bb6d-4b44-932a-57a9ccad584b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.352204] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for the task: (returnval){ [ 1281.352204] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b82a2f-56ef-ce70-104d-358269caa47f" [ 1281.352204] env[62508]: _type = "Task" [ 1281.352204] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.354372] env[62508]: DEBUG nova.network.neutron [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Updating instance_info_cache with network_info: [{"id": "6f2165d0-7bca-4b3b-8351-aa81a2b3af95", "address": "fa:16:3e:5a:95:ba", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.147", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f2165d0-7b", "ovs_interfaceid": "6f2165d0-7bca-4b3b-8351-aa81a2b3af95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1281.368488] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1281.368793] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1281.369248] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1281.624396] env[62508]: DEBUG nova.scheduler.client.report [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1281.716599] env[62508]: DEBUG nova.compute.manager [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1281.760043] env[62508]: DEBUG nova.virt.hardware [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1281.760043] env[62508]: DEBUG nova.virt.hardware [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1281.760043] env[62508]: DEBUG nova.virt.hardware [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1281.760335] env[62508]: DEBUG nova.virt.hardware [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1281.760335] env[62508]: DEBUG nova.virt.hardware [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1281.760335] env[62508]: DEBUG nova.virt.hardware [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1281.760335] env[62508]: DEBUG nova.virt.hardware [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1281.760575] env[62508]: DEBUG nova.virt.hardware [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1281.760715] env[62508]: DEBUG nova.virt.hardware [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1281.760894] env[62508]: DEBUG nova.virt.hardware [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1281.761129] env[62508]: DEBUG nova.virt.hardware [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1281.762097] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35757b18-206c-48e8-83b6-b14b6f3ac616 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.774077] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a9b12ce-87a2-498d-b395-7cfbb253944f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.811413] env[62508]: DEBUG oslo_vmware.rw_handles [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Completed reading data from the image iterator. {{(pid=62508) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1281.811768] env[62508]: DEBUG oslo_vmware.rw_handles [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/88f52982-aa82-42c1-aea0-aa17c8dbeac8/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1281.827586] env[62508]: DEBUG nova.compute.manager [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1281.860620] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Releasing lock "refresh_cache-fb7519c4-0254-4831-81f3-0eed14844f2d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1281.861570] env[62508]: DEBUG nova.compute.manager [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Instance network_info: |[{"id": "6f2165d0-7bca-4b3b-8351-aa81a2b3af95", "address": "fa:16:3e:5a:95:ba", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.147", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f2165d0-7b", "ovs_interfaceid": "6f2165d0-7bca-4b3b-8351-aa81a2b3af95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1281.864195] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:95:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6f2165d0-7bca-4b3b-8351-aa81a2b3af95', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1281.873901] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Creating folder: Project (aa9fc12f40cf45729330d52f46688f41). Parent ref: group-v368536. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1281.875161] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Downloaded image file data f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 to vmware_temp/88f52982-aa82-42c1-aea0-aa17c8dbeac8/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62508) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1281.877984] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Caching image {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1281.879104] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Copying Virtual Disk [datastore1] vmware_temp/88f52982-aa82-42c1-aea0-aa17c8dbeac8/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/tmp-sparse.vmdk to [datastore1] vmware_temp/88f52982-aa82-42c1-aea0-aa17c8dbeac8/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1281.879104] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4cc45013-c231-4fc1-b3f7-427eca6a6c76 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.881750] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7985d95-21d2-4187-aa45-4d45282d873c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.891736] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1281.891736] env[62508]: value = "task-1775267" [ 1281.891736] env[62508]: _type = "Task" [ 1281.891736] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.899193] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Created folder: Project (aa9fc12f40cf45729330d52f46688f41) in parent group-v368536. [ 1281.899437] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Creating folder: Instances. Parent ref: group-v368543. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1281.900132] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f154576-9d14-42bd-9f90-17579122ed88 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.906351] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775267, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.914120] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Created folder: Instances in parent group-v368543. [ 1281.914230] env[62508]: DEBUG oslo.service.loopingcall [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1281.914391] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1281.915041] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b06c9ef8-754f-4e4f-9932-245a17de2d35 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.949884] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1281.949884] env[62508]: value = "task-1775269" [ 1281.949884] env[62508]: _type = "Task" [ 1281.949884] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.960248] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775269, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.131832] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.465s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.132489] env[62508]: DEBUG nova.compute.manager [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1282.139883] env[62508]: DEBUG oslo_concurrency.lockutils [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.509s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1282.143034] env[62508]: INFO nova.compute.claims [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1282.168903] env[62508]: DEBUG nova.compute.manager [req-80ef03bf-952a-4063-8a0f-6000f958cd4b req-dccf3c92-7b86-4456-b414-c57f94cef452 service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Received event network-changed-987ac3c1-9f91-4672-9ca9-339fd8ad1dfd {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1282.168903] env[62508]: DEBUG nova.compute.manager [req-80ef03bf-952a-4063-8a0f-6000f958cd4b req-dccf3c92-7b86-4456-b414-c57f94cef452 service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Refreshing instance network info cache due to event network-changed-987ac3c1-9f91-4672-9ca9-339fd8ad1dfd. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1282.168903] env[62508]: DEBUG oslo_concurrency.lockutils [req-80ef03bf-952a-4063-8a0f-6000f958cd4b req-dccf3c92-7b86-4456-b414-c57f94cef452 service nova] Acquiring lock "refresh_cache-ee99ff4d-9996-4cfa-b038-7b19aef27438" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1282.168903] env[62508]: DEBUG oslo_concurrency.lockutils [req-80ef03bf-952a-4063-8a0f-6000f958cd4b req-dccf3c92-7b86-4456-b414-c57f94cef452 service nova] Acquired lock "refresh_cache-ee99ff4d-9996-4cfa-b038-7b19aef27438" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.169125] env[62508]: DEBUG nova.network.neutron [req-80ef03bf-952a-4063-8a0f-6000f958cd4b req-dccf3c92-7b86-4456-b414-c57f94cef452 service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Refreshing network info cache for port 987ac3c1-9f91-4672-9ca9-339fd8ad1dfd {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1282.365050] env[62508]: DEBUG oslo_concurrency.lockutils [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1282.407001] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775267, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.464420] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775269, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.641481] env[62508]: DEBUG nova.compute.utils [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1282.649033] env[62508]: DEBUG nova.compute.manager [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1282.649033] env[62508]: DEBUG nova.network.neutron [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1282.818915] env[62508]: DEBUG nova.compute.manager [req-f5fdfff8-0490-4ec5-af65-73d9a0dee13a req-2cbcb1cb-addb-45a8-a91a-31226a545ad4 service nova] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Received event network-changed-6f2165d0-7bca-4b3b-8351-aa81a2b3af95 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1282.818915] env[62508]: DEBUG nova.compute.manager [req-f5fdfff8-0490-4ec5-af65-73d9a0dee13a req-2cbcb1cb-addb-45a8-a91a-31226a545ad4 service nova] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Refreshing instance network info cache due to event network-changed-6f2165d0-7bca-4b3b-8351-aa81a2b3af95. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1282.818988] env[62508]: DEBUG oslo_concurrency.lockutils [req-f5fdfff8-0490-4ec5-af65-73d9a0dee13a req-2cbcb1cb-addb-45a8-a91a-31226a545ad4 service nova] Acquiring lock "refresh_cache-fb7519c4-0254-4831-81f3-0eed14844f2d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1282.819844] env[62508]: DEBUG oslo_concurrency.lockutils [req-f5fdfff8-0490-4ec5-af65-73d9a0dee13a req-2cbcb1cb-addb-45a8-a91a-31226a545ad4 service nova] Acquired lock "refresh_cache-fb7519c4-0254-4831-81f3-0eed14844f2d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.819844] env[62508]: DEBUG nova.network.neutron [req-f5fdfff8-0490-4ec5-af65-73d9a0dee13a req-2cbcb1cb-addb-45a8-a91a-31226a545ad4 service nova] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Refreshing network info cache for port 6f2165d0-7bca-4b3b-8351-aa81a2b3af95 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1282.903998] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775267, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.698635} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.904265] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Copied Virtual Disk [datastore1] vmware_temp/88f52982-aa82-42c1-aea0-aa17c8dbeac8/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/tmp-sparse.vmdk to [datastore1] vmware_temp/88f52982-aa82-42c1-aea0-aa17c8dbeac8/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1282.904466] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Deleting the datastore file [datastore1] vmware_temp/88f52982-aa82-42c1-aea0-aa17c8dbeac8/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/tmp-sparse.vmdk {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1282.904730] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e00cff3-4fe5-4893-862a-90b51cf9ccf7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.911307] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1282.911307] env[62508]: value = "task-1775270" [ 1282.911307] env[62508]: _type = "Task" [ 1282.911307] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.919567] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775270, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.966882] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775269, 'name': CreateVM_Task, 'duration_secs': 0.653036} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.966882] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1282.967568] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1282.967568] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.967568] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1282.969303] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5084668-9fc6-43a0-a427-840ed74e3fa0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.973026] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for the task: (returnval){ [ 1282.973026] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f27e57-d614-f498-c99c-9c487ef6c5fa" [ 1282.973026] env[62508]: _type = "Task" [ 1282.973026] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.981135] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f27e57-d614-f498-c99c-9c487ef6c5fa, 'name': SearchDatastore_Task} progress is 0%. 
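The records above show the driver repeatedly polling vCenter tasks (CopyVirtualDisk_Task, DeleteDatastoreFile_Task, CreateVM_Task, SearchDatastore_Task) until they report completion, emitting a progress percentage on each pass. A minimal, self-contained sketch of that poll-until-done pattern follows; it does not reuse the oslo.vmware internals referenced in the log (wait_for_task / _poll_task), and the get_task_state callable, interval and timeout are assumptions for illustration only.

    import time

    # Hypothetical task states mirroring the success/progress reporting seen in the log.
    RUNNING, SUCCESS, ERROR = "running", "success", "error"

    def wait_until_done(get_task_state, interval=0.5, timeout=300.0):
        """Poll a task-state callable until it reports success or error.

        get_task_state() is assumed to return a (state, progress) tuple; it is an
        illustrative stand-in for querying vCenter about a task such as task-1775270.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = get_task_state()
            if state == SUCCESS:
                return True
            if state == ERROR:
                raise RuntimeError("task failed at %s%% progress" % progress)
            time.sleep(interval)  # pause between polls, like the periodic "progress is N%" lines
        raise TimeoutError("task did not complete within %.0fs" % timeout)

In the log the equivalent loop lives in oslo_vmware/api.py and prints one "progress is N%" DEBUG line per poll until the task returns "completed successfully".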
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.045305] env[62508]: DEBUG nova.policy [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5fdb789bc0c5459b9dfbb59ada7ff909', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '49f6b1eaa6ba4ff2bc4b783ce5d3d0e1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1283.156187] env[62508]: DEBUG nova.compute.manager [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1283.343892] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02bd31f-2173-4800-8d36-7f2388ce8c19 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.352157] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4081c43d-507b-444f-846c-7f2f7e144154 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.385078] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44bd9f42-1fbc-448b-9160-ca95b2b4aae7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.393614] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de71226b-c0b4-4e53-8847-681b3b2f8535 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.408275] env[62508]: DEBUG nova.compute.provider_tree [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1283.419909] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775270, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.031633} completed successfully. 
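One record above shows the policy check for network:attach_external_network failing for a request whose credentials carry only the member and reader roles. Nova evaluates such checks with oslo.policy against the rule registered for the action (via nova/policy.py, as the record's location shows); the deliberately simplified stand-in below only illustrates the role-based decision, and the required role set is an assumption, not Nova's registered default for this action.

    def check_policy(required_roles, creds):
        """Return True if the credentials hold at least one of the required roles.

        A simplified stand-in for the oslo.policy evaluation behind nova.policy.authorize;
        required_roles is hypothetical and does not reflect Nova's actual rule string.
        """
        return bool(set(required_roles) & set(creds.get("roles", [])))

    creds = {"roles": ["member", "reader"],
             "project_id": "49f6b1eaa6ba4ff2bc4b783ce5d3d0e1"}
    print(check_policy({"admin"}, creds))  # False, matching the "Policy check ... failed" record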
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.420830] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1283.420963] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Moving file from [datastore1] vmware_temp/88f52982-aa82-42c1-aea0-aa17c8dbeac8/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 to [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7. {{(pid=62508) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 1283.421231] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-09e8a66c-b49b-40a5-988b-8bbb3a49b1c7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.428691] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1283.428691] env[62508]: value = "task-1775271" [ 1283.428691] env[62508]: _type = "Task" [ 1283.428691] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.437218] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775271, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.486798] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1283.487047] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1283.487254] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1283.912466] env[62508]: DEBUG nova.scheduler.client.report [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1283.945344] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775271, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.027186} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.945344] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] File moved {{(pid=62508) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 1283.945571] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Cleaning up location [datastore1] vmware_temp/88f52982-aa82-42c1-aea0-aa17c8dbeac8 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1283.945893] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Deleting the datastore file [datastore1] vmware_temp/88f52982-aa82-42c1-aea0-aa17c8dbeac8 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1283.946278] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca0872ee-97d7-43c1-bcdb-060bf383d0e3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.953699] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1283.953699] env[62508]: value = "task-1775272" [ 1283.953699] env[62508]: _type = "Task" [ 1283.953699] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.965111] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775272, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.168477] env[62508]: DEBUG nova.compute.manager [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1284.178845] env[62508]: DEBUG nova.network.neutron [req-80ef03bf-952a-4063-8a0f-6000f958cd4b req-dccf3c92-7b86-4456-b414-c57f94cef452 service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Updated VIF entry in instance network info cache for port 987ac3c1-9f91-4672-9ca9-339fd8ad1dfd. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1284.179167] env[62508]: DEBUG nova.network.neutron [req-80ef03bf-952a-4063-8a0f-6000f958cd4b req-dccf3c92-7b86-4456-b414-c57f94cef452 service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Updating instance_info_cache with network_info: [{"id": "987ac3c1-9f91-4672-9ca9-339fd8ad1dfd", "address": "fa:16:3e:0d:65:46", "network": {"id": "93471a4c-8043-41bc-9a6c-49116ad50d1a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2104534686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d188021b90b4a7cb04521e090d0c1c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap987ac3c1-9f", "ovs_interfaceid": "987ac3c1-9f91-4672-9ca9-339fd8ad1dfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1284.202778] env[62508]: DEBUG nova.virt.hardware [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1284.202897] env[62508]: DEBUG nova.virt.hardware [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1284.203024] env[62508]: DEBUG nova.virt.hardware [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1284.203220] env[62508]: DEBUG nova.virt.hardware [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 
tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1284.203364] env[62508]: DEBUG nova.virt.hardware [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1284.203510] env[62508]: DEBUG nova.virt.hardware [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1284.203717] env[62508]: DEBUG nova.virt.hardware [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1284.203870] env[62508]: DEBUG nova.virt.hardware [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1284.204040] env[62508]: DEBUG nova.virt.hardware [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1284.204205] env[62508]: DEBUG nova.virt.hardware [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1284.204376] env[62508]: DEBUG nova.virt.hardware [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1284.205501] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38dff2df-cccd-4f2f-8885-c8614086abdb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.214396] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e7dd26-80cc-4609-97a6-243e6bea7584 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.420098] env[62508]: DEBUG oslo_concurrency.lockutils [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.280s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1284.420500] env[62508]: DEBUG nova.compute.manager [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1284.424725] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.613s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1284.426275] env[62508]: INFO nova.compute.claims [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1284.465333] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775272, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.02634} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.467839] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1284.467839] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-905cf632-68c4-47c5-8457-2a17b6852025 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.474775] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1284.474775] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52afc2fb-2235-b6b9-06ee-fb83fc8c2c32" [ 1284.474775] env[62508]: _type = "Task" [ 1284.474775] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.486942] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52afc2fb-2235-b6b9-06ee-fb83fc8c2c32, 'name': SearchDatastore_Task, 'duration_secs': 0.008921} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.487220] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1284.487543] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b/96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1284.487749] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1284.487934] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1284.488163] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb3a7812-f09f-4574-a9b3-9a171363ed15 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.492714] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12a846c1-9fb6-4b3c-bff4-26ba55851c99 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.502096] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1284.502096] env[62508]: value = "task-1775273" [ 1284.502096] env[62508]: _type = "Task" [ 1284.502096] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.503313] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1284.503399] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Folder [datastore1] devstack-image-cache_base created. 
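The lock acquire/release records around the "[datastore1] devstack-image-cache_base/f81c384b-..." paths show how concurrent builds serialize on the shared image cache before copying from it, creating the cache directory first if it is missing. A small sketch of that guard pattern using oslo_concurrency.lockutils follows; the local cache directory and the fetch_image callable are hypothetical stand-ins for the datastore directory and the CopyVirtualDisk_Task call, not the actual nova.virt.vmwareapi code.

    import os

    from oslo_concurrency import lockutils

    CACHE_ROOT = "/tmp/devstack-image-cache_base"  # hypothetical local stand-in for [datastore1]

    def ensure_cached_image(image_id, fetch_image, filename="disk.vmdk"):
        """Populate the image cache exactly once, even with concurrent callers.

        fetch_image(dest_path) is an assumed callable that produces the cached file; the
        real flow in the log copies and moves VMDKs on the datastore instead.
        """
        cached = os.path.join(CACHE_ROOT, image_id, filename)
        # In-process lock keyed on the cache entry, analogous to the
        # 'Acquiring lock "[datastore1] devstack-image-cache_base/<image>"' records above.
        with lockutils.lock("image-cache-" + image_id):
            if not os.path.exists(cached):
                # mirrors the "Creating directory with path [datastore1] devstack-image-cache_base" step
                os.makedirs(os.path.dirname(cached), exist_ok=True)
                fetch_image(cached)
        return cached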
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1284.508667] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6836597c-fdf1-4136-b17d-d120e02f9eb0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.517863] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775273, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.520943] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for the task: (returnval){ [ 1284.520943] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ea1362-5ec0-ffb0-ffd4-0721b611410c" [ 1284.520943] env[62508]: _type = "Task" [ 1284.520943] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.527048] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ea1362-5ec0-ffb0-ffd4-0721b611410c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.683151] env[62508]: DEBUG nova.network.neutron [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Successfully updated port: 6e5092d1-840e-43a2-9714-b0ee31c608f4 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1284.685234] env[62508]: DEBUG oslo_concurrency.lockutils [req-80ef03bf-952a-4063-8a0f-6000f958cd4b req-dccf3c92-7b86-4456-b414-c57f94cef452 service nova] Releasing lock "refresh_cache-ee99ff4d-9996-4cfa-b038-7b19aef27438" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1284.689202] env[62508]: DEBUG nova.network.neutron [req-f5fdfff8-0490-4ec5-af65-73d9a0dee13a req-2cbcb1cb-addb-45a8-a91a-31226a545ad4 service nova] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Updated VIF entry in instance network info cache for port 6f2165d0-7bca-4b3b-8351-aa81a2b3af95. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1284.689202] env[62508]: DEBUG nova.network.neutron [req-f5fdfff8-0490-4ec5-af65-73d9a0dee13a req-2cbcb1cb-addb-45a8-a91a-31226a545ad4 service nova] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Updating instance_info_cache with network_info: [{"id": "6f2165d0-7bca-4b3b-8351-aa81a2b3af95", "address": "fa:16:3e:5a:95:ba", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.147", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f2165d0-7b", "ovs_interfaceid": "6f2165d0-7bca-4b3b-8351-aa81a2b3af95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1284.819955] env[62508]: DEBUG nova.network.neutron [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Successfully created port: 7f451ea1-30cb-4db4-a325-8e1ef277ec13 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1284.931454] env[62508]: DEBUG nova.compute.utils [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1284.938323] env[62508]: DEBUG nova.compute.manager [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1284.939317] env[62508]: DEBUG nova.network.neutron [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1285.019902] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775273, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509487} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.027307] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b/96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1285.027724] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1285.028477] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-546e12be-49b7-4037-8f01-55732761b25c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.041338] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ea1362-5ec0-ffb0-ffd4-0721b611410c, 'name': SearchDatastore_Task, 'duration_secs': 0.016096} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.044809] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1285.044809] env[62508]: value = "task-1775274" [ 1285.044809] env[62508]: _type = "Task" [ 1285.044809] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.044809] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eff2a67c-ef57-4224-b017-80a55b6c3968 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.053063] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for the task: (returnval){ [ 1285.053063] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e0354e-d492-d419-04e6-9c6857049664" [ 1285.053063] env[62508]: _type = "Task" [ 1285.053063] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.058662] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775274, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.068191] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e0354e-d492-d419-04e6-9c6857049664, 'name': SearchDatastore_Task, 'duration_secs': 0.008776} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.068305] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1285.068563] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] ee99ff4d-9996-4cfa-b038-7b19aef27438/ee99ff4d-9996-4cfa-b038-7b19aef27438.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1285.068988] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.069216] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1285.069468] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2373c2d-20d6-4541-b911-9f56ddd5c74a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.072138] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df2f52e8-65cb-4f53-8e90-3f690b662b1d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.081681] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for the task: (returnval){ [ 1285.081681] env[62508]: value = "task-1775275" [ 1285.081681] env[62508]: _type = "Task" [ 1285.081681] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.082845] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1285.083726] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1285.087792] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64a3daf2-3bc2-4301-b530-1d2535dafdd2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.095789] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775275, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.097309] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for the task: (returnval){ [ 1285.097309] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d82997-0ae2-af27-2d4e-a0abc5fdde52" [ 1285.097309] env[62508]: _type = "Task" [ 1285.097309] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.110560] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d82997-0ae2-af27-2d4e-a0abc5fdde52, 'name': SearchDatastore_Task, 'duration_secs': 0.01012} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.111341] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fa51142-8271-40a6-b144-e23640496973 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.119935] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for the task: (returnval){ [ 1285.119935] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5289886f-73da-db5c-44a6-9abcdcca17a0" [ 1285.119935] env[62508]: _type = "Task" [ 1285.119935] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.133528] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5289886f-73da-db5c-44a6-9abcdcca17a0, 'name': SearchDatastore_Task, 'duration_secs': 0.008954} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.133528] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1285.133528] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] fb7519c4-0254-4831-81f3-0eed14844f2d/fb7519c4-0254-4831-81f3-0eed14844f2d.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1285.133528] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6259fbf2-eedb-460f-afea-cf1b00fd9398 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.140099] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for the task: (returnval){ [ 1285.140099] env[62508]: value = "task-1775276" [ 1285.140099] env[62508]: _type = "Task" [ 1285.140099] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.154485] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775276, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.185568] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Acquiring lock "refresh_cache-b182d3aa-a4de-4879-ab36-2cb51472158a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1285.185865] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Acquired lock "refresh_cache-b182d3aa-a4de-4879-ab36-2cb51472158a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.186090] env[62508]: DEBUG nova.network.neutron [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1285.191793] env[62508]: DEBUG oslo_concurrency.lockutils [req-f5fdfff8-0490-4ec5-af65-73d9a0dee13a req-2cbcb1cb-addb-45a8-a91a-31226a545ad4 service nova] Releasing lock "refresh_cache-fb7519c4-0254-4831-81f3-0eed14844f2d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1285.373358] env[62508]: DEBUG nova.policy [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1bebc8625d4842c3b630da112442bcbb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf3fdb58653a47149b5ae7316424d235', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1285.434731] env[62508]: DEBUG nova.compute.manager [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1285.556892] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775274, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.124698} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.559894] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1285.561624] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5c4c8c-f2db-479f-a5fa-33f956ea4b6d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.589145] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b/96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1285.592649] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-152db45a-5fe4-499d-8f85-cc2c8a466e4a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.621465] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775275, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50904} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.622937] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] ee99ff4d-9996-4cfa-b038-7b19aef27438/ee99ff4d-9996-4cfa-b038-7b19aef27438.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1285.623548] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1285.624146] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1285.624146] env[62508]: value = "task-1775277" [ 1285.624146] env[62508]: _type = "Task" [ 1285.624146] env[62508]: } to complete. 
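Taken together, the task records above trace the per-instance root disk preparation: the cached VMDK is copied into the instance folder (CopyVirtualDisk_Task), the root disk is extended to the flavor size ("Extending root virtual disk to 1048576" KB, consistent with m1.nano's root_gb=1), and the VM is reconfigured to attach the disk (ReconfigVM_Task). A compact sketch of that sequence with hypothetical helper callables, not the actual nova.virt.vmwareapi vmops/volumeops code:

    def prepare_root_disk(copy_disk, extend_disk, attach_disk,
                          cache_vmdk, instance_vmdk, root_gb=1):
        """Sketch of the copy -> extend -> attach flow visible in the log.

        copy_disk, extend_disk and attach_disk are assumed callables standing in for
        CopyVirtualDisk_Task, ExtendVirtualDisk_Task and ReconfigVM_Task respectively.
        """
        copy_disk(src=cache_vmdk, dst=instance_vmdk)    # cache -> <instance uuid>/<uuid>.vmdk
        new_size_kb = root_gb * 1024 * 1024             # root_gb=1 -> 1048576 KB
        extend_disk(instance_vmdk, new_size_kb)         # "Extending root virtual disk to 1048576"
        attach_disk(instance_vmdk, disk_type="sparse")  # "Reconfiguring VM ... to attach disk ... with type sparse"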
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.626923] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6b7348a9-fe61-421e-8110-97548c2bf46b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.638917] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for the task: (returnval){ [ 1285.638917] env[62508]: value = "task-1775278" [ 1285.638917] env[62508]: _type = "Task" [ 1285.638917] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.643492] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775277, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.664325] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775276, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.668265] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775278, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.707937] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eea73e7-039c-448e-9db8-1e2917e49fac {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.719679] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c92f55ad-7ae4-43d2-9fe2-e2ca64bb9314 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.760234] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be4f781e-3a9f-4581-901a-5c541565f338 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.764278] env[62508]: DEBUG nova.network.neutron [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1285.772678] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba8a27b-02c7-4731-85b2-3d72e021b1b2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.788810] env[62508]: DEBUG nova.compute.provider_tree [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1286.141782] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775277, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.160590] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775278, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087676} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.164032] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1286.165064] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775276, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.794162} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.165209] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e274c7ec-c6d6-43cb-9829-08ad154b9a64 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.169419] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] fb7519c4-0254-4831-81f3-0eed14844f2d/fb7519c4-0254-4831-81f3-0eed14844f2d.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1286.169419] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1286.169419] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-753132ba-3b4b-40f1-8c37-e81ec2c442b5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.195612] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] ee99ff4d-9996-4cfa-b038-7b19aef27438/ee99ff4d-9996-4cfa-b038-7b19aef27438.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1286.197823] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18cd7691-3a57-438c-b8b5-576095bd4787 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.213450] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for the task: (returnval){ [ 1286.213450] env[62508]: value = "task-1775279" [ 1286.213450] env[62508]: _type = "Task" [ 1286.213450] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.221159] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for the task: (returnval){ [ 1286.221159] env[62508]: value = "task-1775280" [ 1286.221159] env[62508]: _type = "Task" [ 1286.221159] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.226347] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775279, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.233638] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775280, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.293681] env[62508]: DEBUG nova.scheduler.client.report [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1286.414581] env[62508]: DEBUG nova.network.neutron [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Updating instance_info_cache with network_info: [{"id": "6e5092d1-840e-43a2-9714-b0ee31c608f4", "address": "fa:16:3e:71:ea:e4", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.64", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e5092d1-84", "ovs_interfaceid": "6e5092d1-840e-43a2-9714-b0ee31c608f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1286.452257] env[62508]: DEBUG nova.compute.manager [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1286.492993] env[62508]: DEBUG nova.virt.hardware [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1286.493265] env[62508]: DEBUG nova.virt.hardware [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1286.493422] env[62508]: DEBUG nova.virt.hardware [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1286.493604] env[62508]: DEBUG nova.virt.hardware [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1286.493748] env[62508]: DEBUG nova.virt.hardware [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1286.493907] env[62508]: DEBUG nova.virt.hardware [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1286.495418] env[62508]: DEBUG nova.virt.hardware [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1286.495418] env[62508]: DEBUG nova.virt.hardware [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1286.495418] env[62508]: DEBUG nova.virt.hardware [None 
req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1286.495418] env[62508]: DEBUG nova.virt.hardware [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1286.495418] env[62508]: DEBUG nova.virt.hardware [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1286.496744] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65705cc4-001a-4946-8199-4553b7934135 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.510106] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ffa06d-0b23-4c34-8ff9-2c28337d9125 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.641357] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775277, 'name': ReconfigVM_Task, 'duration_secs': 0.866234} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.641357] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b/96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1286.642029] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a7f9194-66a6-4095-af91-0cb22638af5b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.651565] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1286.651565] env[62508]: value = "task-1775281" [ 1286.651565] env[62508]: _type = "Task" [ 1286.651565] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.663055] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775281, 'name': Rename_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.737316] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775280, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.737589] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775279, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067197} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.738885] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1286.738885] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f7948b-57e5-4ec0-a57e-59a7b55564f2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.765829] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] fb7519c4-0254-4831-81f3-0eed14844f2d/fb7519c4-0254-4831-81f3-0eed14844f2d.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1286.765829] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35bcb485-daa8-45c0-ae74-ca5c345756c1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.785122] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Acquiring lock "46a524e2-93b0-4726-812f-98e08b6ba0b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1286.785372] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Lock "46a524e2-93b0-4726-812f-98e08b6ba0b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1286.790492] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for the task: (returnval){ [ 1286.790492] 
env[62508]: value = "task-1775282" [ 1286.790492] env[62508]: _type = "Task" [ 1286.790492] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.799451] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775282, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.809078] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.385s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1286.809398] env[62508]: DEBUG nova.compute.manager [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1286.812071] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 11.314s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1286.812206] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1286.816108] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1286.816496] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.927s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1286.817985] env[62508]: INFO nova.compute.claims [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1286.823699] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2cca705-e7f1-4723-9f13-9b0b72b0e27b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.830984] env[62508]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c174c0a0-340d-4a06-afa7-e2e61a141370 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.870776] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5b7706-0858-4407-a626-fbdd56a1680b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.882296] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e62972ce-6afb-42f5-9e03-3f1c7dc75688 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.914495] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181377MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1286.914618] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1286.920375] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Releasing lock "refresh_cache-b182d3aa-a4de-4879-ab36-2cb51472158a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1286.920760] env[62508]: DEBUG nova.compute.manager [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Instance network_info: |[{"id": "6e5092d1-840e-43a2-9714-b0ee31c608f4", "address": "fa:16:3e:71:ea:e4", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.64", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e5092d1-84", "ovs_interfaceid": "6e5092d1-840e-43a2-9714-b0ee31c608f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1286.921153] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: 
b182d3aa-a4de-4879-ab36-2cb51472158a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:ea:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6e5092d1-840e-43a2-9714-b0ee31c608f4', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1286.929227] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Creating folder: Project (81e9cc83a10a439da4100e4e5725370f). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1286.929583] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8d07cd38-585d-40a2-8191-5ee478f69783 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.940076] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Created folder: Project (81e9cc83a10a439da4100e4e5725370f) in parent group-v368536. [ 1286.940351] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Creating folder: Instances. Parent ref: group-v368546. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1286.940745] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-77715479-4763-4849-be6a-4e5941218bb6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.950012] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Created folder: Instances in parent group-v368546. [ 1286.950951] env[62508]: DEBUG oslo.service.loopingcall [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1286.950951] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1286.950951] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-712c2ad1-b87d-4d4e-867f-c148f5504d9c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.975488] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1286.975488] env[62508]: value = "task-1775285" [ 1286.975488] env[62508]: _type = "Task" [ 1286.975488] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.987627] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775285, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.116316] env[62508]: DEBUG nova.network.neutron [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Successfully created port: f7f2eb4c-dab6-44e0-8f5e-7013ddd13683 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1287.165192] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775281, 'name': Rename_Task, 'duration_secs': 0.206926} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.165192] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1287.166416] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a046708-4e8f-4b3e-9018-b808f320bf3e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.179788] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1287.179788] env[62508]: value = "task-1775286" [ 1287.179788] env[62508]: _type = "Task" [ 1287.179788] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.191494] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775286, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.231236] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775280, 'name': ReconfigVM_Task, 'duration_secs': 0.669906} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.231559] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Reconfigured VM instance instance-00000001 to attach disk [datastore1] ee99ff4d-9996-4cfa-b038-7b19aef27438/ee99ff4d-9996-4cfa-b038-7b19aef27438.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1287.232233] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-da1db971-a174-4ab7-a216-4948f8396eda {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.239184] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for the task: (returnval){ [ 1287.239184] env[62508]: value = "task-1775287" [ 1287.239184] env[62508]: _type = "Task" [ 1287.239184] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.252803] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775287, 'name': Rename_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.308248] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775282, 'name': ReconfigVM_Task, 'duration_secs': 0.329517} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.308554] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Reconfigured VM instance instance-00000002 to attach disk [datastore1] fb7519c4-0254-4831-81f3-0eed14844f2d/fb7519c4-0254-4831-81f3-0eed14844f2d.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1287.309230] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-66fe094b-7e47-4d68-a004-022b9cd78e66 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.322361] env[62508]: DEBUG nova.compute.utils [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1287.323782] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for the task: (returnval){ [ 1287.323782] env[62508]: value = "task-1775288" [ 1287.323782] env[62508]: _type = "Task" [ 1287.323782] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.324232] env[62508]: DEBUG nova.compute.manager [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1287.324387] env[62508]: DEBUG nova.network.neutron [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1287.339732] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775288, 'name': Rename_Task} progress is 10%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.482861] env[62508]: DEBUG nova.policy [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '33f0f120bd654d56bcae0a0f326df889', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3c285de053cc4d5f80df78c29ae7eaf7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1287.492873] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775285, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.524431] env[62508]: DEBUG nova.compute.manager [req-f866854f-0d5c-4563-b277-16c15e010659 req-787901c6-06cf-40f1-9460-6488fcb3dc0a service nova] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Received event network-vif-plugged-6e5092d1-840e-43a2-9714-b0ee31c608f4 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1287.524637] env[62508]: DEBUG oslo_concurrency.lockutils [req-f866854f-0d5c-4563-b277-16c15e010659 req-787901c6-06cf-40f1-9460-6488fcb3dc0a service nova] Acquiring lock "b182d3aa-a4de-4879-ab36-2cb51472158a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1287.524842] env[62508]: DEBUG oslo_concurrency.lockutils [req-f866854f-0d5c-4563-b277-16c15e010659 req-787901c6-06cf-40f1-9460-6488fcb3dc0a service nova] Lock "b182d3aa-a4de-4879-ab36-2cb51472158a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1287.525021] env[62508]: DEBUG oslo_concurrency.lockutils [req-f866854f-0d5c-4563-b277-16c15e010659 req-787901c6-06cf-40f1-9460-6488fcb3dc0a service nova] Lock "b182d3aa-a4de-4879-ab36-2cb51472158a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1287.528450] env[62508]: DEBUG nova.compute.manager [req-f866854f-0d5c-4563-b277-16c15e010659 req-787901c6-06cf-40f1-9460-6488fcb3dc0a service nova] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] No waiting events found dispatching network-vif-plugged-6e5092d1-840e-43a2-9714-b0ee31c608f4 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1287.528640] env[62508]: WARNING nova.compute.manager [req-f866854f-0d5c-4563-b277-16c15e010659 req-787901c6-06cf-40f1-9460-6488fcb3dc0a service nova] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Received unexpected event network-vif-plugged-6e5092d1-840e-43a2-9714-b0ee31c608f4 for instance with vm_state building and task_state spawning. 
[ 1287.528907] env[62508]: DEBUG nova.compute.manager [req-f866854f-0d5c-4563-b277-16c15e010659 req-787901c6-06cf-40f1-9460-6488fcb3dc0a service nova] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Received event network-changed-6e5092d1-840e-43a2-9714-b0ee31c608f4 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1287.529021] env[62508]: DEBUG nova.compute.manager [req-f866854f-0d5c-4563-b277-16c15e010659 req-787901c6-06cf-40f1-9460-6488fcb3dc0a service nova] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Refreshing instance network info cache due to event network-changed-6e5092d1-840e-43a2-9714-b0ee31c608f4. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1287.529194] env[62508]: DEBUG oslo_concurrency.lockutils [req-f866854f-0d5c-4563-b277-16c15e010659 req-787901c6-06cf-40f1-9460-6488fcb3dc0a service nova] Acquiring lock "refresh_cache-b182d3aa-a4de-4879-ab36-2cb51472158a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1287.529328] env[62508]: DEBUG oslo_concurrency.lockutils [req-f866854f-0d5c-4563-b277-16c15e010659 req-787901c6-06cf-40f1-9460-6488fcb3dc0a service nova] Acquired lock "refresh_cache-b182d3aa-a4de-4879-ab36-2cb51472158a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1287.529477] env[62508]: DEBUG nova.network.neutron [req-f866854f-0d5c-4563-b277-16c15e010659 req-787901c6-06cf-40f1-9460-6488fcb3dc0a service nova] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Refreshing network info cache for port 6e5092d1-840e-43a2-9714-b0ee31c608f4 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1287.697224] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775286, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.754974] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775287, 'name': Rename_Task, 'duration_secs': 0.206659} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.755658] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1287.756808] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-03db813d-b16b-4bfa-96cf-cb0ced9043df {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.769496] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for the task: (returnval){ [ 1287.769496] env[62508]: value = "task-1775289" [ 1287.769496] env[62508]: _type = "Task" [ 1287.769496] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.782947] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775289, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.825586] env[62508]: DEBUG nova.compute.manager [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1287.840122] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775288, 'name': Rename_Task, 'duration_secs': 0.211159} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.840257] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1287.841523] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee522a8d-42e0-47d0-a4b5-56a672885e72 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.849851] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for the task: (returnval){ [ 1287.849851] env[62508]: value = "task-1775290" [ 1287.849851] env[62508]: _type = "Task" [ 1287.849851] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.866608] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775290, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.880035] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquiring lock "e2d4c71b-1164-4c7d-9ffb-7f5489f92d32" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1287.880945] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lock "e2d4c71b-1164-4c7d-9ffb-7f5489f92d32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1287.987118] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775285, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.190409] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c92a880c-9a71-4362-9443-39d0d47802b5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.206409] env[62508]: DEBUG oslo_vmware.api [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775286, 'name': PowerOnVM_Task, 'duration_secs': 0.607819} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.208201] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1288.208634] env[62508]: INFO nova.compute.manager [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Took 8.83 seconds to spawn the instance on the hypervisor. 
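Each "Task: {'id': task-..., 'name': ...} progress is N%" line followed by "... completed successfully" is oslo.vmware polling an asynchronous vCenter task, here a PowerOnVM_Task, until it finishes. A rough sketch of that flow under stated assumptions: the host, credentials, and VM reference below are placeholders, not values from this deployment, and the exact return shape of wait_for_task may differ between oslo.vmware releases.

from oslo_vmware import api

# One API session per compute process; connection details are placeholders.
session = api.VMwareAPISession(
    "vcenter.example.org",          # host (placeholder)
    "administrator@vsphere.local",  # username (placeholder)
    "secret",                       # password (placeholder)
    10,                             # api_retry_count
    0.5)                            # task_poll_interval, seconds

vm_ref = ...  # managed object reference for the VM; normally located via
              # the PropertyCollector.RetrievePropertiesEx calls in this log

# Start the asynchronous task and block until vCenter reports completion;
# the polling inside wait_for_task() is what emits the "progress is N%" lines.
task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)  # expected 'success' on the happy path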
[ 1288.209234] env[62508]: DEBUG nova.compute.manager [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1288.210652] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de724c13-3c5a-4f37-8794-2b08745a5812 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.215308] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838dfa25-abfd-4c3c-9d94-1a3cee52981e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.256458] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37852528-7a07-499a-9ab1-468048619dce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.262449] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "575ea3dc-850d-4078-8678-41b3c40a4c27" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.263680] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "575ea3dc-850d-4078-8678-41b3c40a4c27" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.271335] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1811c11f-d024-45eb-847f-f0495737fbaf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.296775] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "192995e7-82f5-41be-990d-d91b93f981e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.297041] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "192995e7-82f5-41be-990d-d91b93f981e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.297531] env[62508]: DEBUG nova.compute.provider_tree [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 
tempest-ServersAaction247Test-1583780510-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1288.302404] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775289, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.359822] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "03552483-a365-4d25-94bc-ea9b38ee6cd6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.360057] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "03552483-a365-4d25-94bc-ea9b38ee6cd6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.365573] env[62508]: DEBUG oslo_vmware.api [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775290, 'name': PowerOnVM_Task, 'duration_secs': 0.514728} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.365833] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1288.366036] env[62508]: INFO nova.compute.manager [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Took 11.26 seconds to spawn the instance on the hypervisor. [ 1288.366214] env[62508]: DEBUG nova.compute.manager [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1288.367285] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40005fc5-3d16-43ea-af10-cc8d1fab1b0d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.489468] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775285, 'name': CreateVM_Task, 'duration_secs': 1.309554} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.489468] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1288.489468] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1288.489468] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1288.489468] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1288.489468] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-938a20da-38cd-4db0-b0a6-bfc1b2f7adce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.496644] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Waiting for the task: (returnval){ [ 1288.496644] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523ca8ca-e355-454b-c4a6-7cbc854235a2" [ 1288.496644] env[62508]: _type = "Task" [ 1288.496644] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.504280] env[62508]: DEBUG nova.network.neutron [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Successfully updated port: 7f451ea1-30cb-4db4-a325-8e1ef277ec13 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1288.509133] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523ca8ca-e355-454b-c4a6-7cbc854235a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.763862] env[62508]: INFO nova.compute.manager [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Took 16.47 seconds to build instance. 
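The "Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 ..." records carry the resource-provider inventory that the resource tracker reports to Placement. For each resource class, Placement treats roughly (total - reserved) * allocation_ratio as the usable ceiling it allocates against. A small worked example in Python using the VCPU, MEMORY_MB and DISK_GB figures from that payload (min_unit, max_unit and step_size omitted for brevity):

# Inventory figures as logged for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def usable_capacity(inv):
    # Effective per-class ceiling: (total - reserved) * allocation_ratio
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(usable_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
# i.e. 48 physical vCPUs overcommitted 4x; RAM and disk not overcommitted.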
[ 1288.792982] env[62508]: DEBUG oslo_vmware.api [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775289, 'name': PowerOnVM_Task, 'duration_secs': 0.559087} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.793267] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1288.793460] env[62508]: INFO nova.compute.manager [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Took 13.99 seconds to spawn the instance on the hypervisor. [ 1288.793638] env[62508]: DEBUG nova.compute.manager [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1288.794585] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0af0bac-b18a-419e-8880-a4efbe584d3a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.806657] env[62508]: DEBUG nova.scheduler.client.report [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1288.841016] env[62508]: DEBUG nova.compute.manager [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1288.876494] env[62508]: DEBUG nova.virt.hardware [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1288.876779] env[62508]: DEBUG nova.virt.hardware [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1288.878022] env[62508]: DEBUG nova.virt.hardware [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1288.878022] env[62508]: DEBUG nova.virt.hardware [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1288.878022] env[62508]: DEBUG nova.virt.hardware [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1288.878022] env[62508]: DEBUG nova.virt.hardware [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1288.878022] env[62508]: DEBUG nova.virt.hardware [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1288.878244] env[62508]: DEBUG nova.virt.hardware [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1288.878422] 
env[62508]: DEBUG nova.virt.hardware [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1288.878507] env[62508]: DEBUG nova.virt.hardware [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1288.878680] env[62508]: DEBUG nova.virt.hardware [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1288.880145] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6db134a8-34bb-4033-8d7a-3ccd8df0a90f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.895771] env[62508]: INFO nova.compute.manager [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Took 17.52 seconds to build instance. [ 1288.897022] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6184b7e2-ff15-4dbc-a286-f9dfd8325c46 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.963787] env[62508]: DEBUG nova.network.neutron [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Successfully created port: 69b55107-fb30-4e96-ae5d-062af1ec4850 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1288.973586] env[62508]: DEBUG nova.network.neutron [req-f866854f-0d5c-4563-b277-16c15e010659 req-787901c6-06cf-40f1-9460-6488fcb3dc0a service nova] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Updated VIF entry in instance network info cache for port 6e5092d1-840e-43a2-9714-b0ee31c608f4. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1288.974078] env[62508]: DEBUG nova.network.neutron [req-f866854f-0d5c-4563-b277-16c15e010659 req-787901c6-06cf-40f1-9460-6488fcb3dc0a service nova] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Updating instance_info_cache with network_info: [{"id": "6e5092d1-840e-43a2-9714-b0ee31c608f4", "address": "fa:16:3e:71:ea:e4", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.64", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e5092d1-84", "ovs_interfaceid": "6e5092d1-840e-43a2-9714-b0ee31c608f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1289.014636] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Acquiring lock "refresh_cache-827b0887-2132-49af-bcce-cedc7237245d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1289.014636] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Acquired lock "refresh_cache-827b0887-2132-49af-bcce-cedc7237245d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1289.014636] env[62508]: DEBUG nova.network.neutron [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1289.014636] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523ca8ca-e355-454b-c4a6-7cbc854235a2, 'name': SearchDatastore_Task, 'duration_secs': 0.023357} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.014907] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1289.016430] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1289.016809] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1289.016860] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1289.017060] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1289.017360] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f7f37d8-e54c-499b-833e-8c590da937dc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.028211] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1289.028211] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1289.028809] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf59edf7-e3aa-40e4-b61b-b9a9924230e4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.040145] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Waiting for the task: (returnval){ [ 1289.040145] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ebca2e-a78e-198d-9703-42861b7f115f" [ 1289.040145] env[62508]: _type = "Task" [ 1289.040145] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.052982] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ebca2e-a78e-198d-9703-42861b7f115f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.180554] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Acquiring lock "7d23d8f0-d7a9-4236-ad28-208e77b72138" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1289.181373] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Lock "7d23d8f0-d7a9-4236-ad28-208e77b72138" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1289.266597] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a33b0a5-ebec-4ce5-9125-1e54f0971daf tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Lock "96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.989s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1289.320294] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.502s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1289.320294] env[62508]: DEBUG nova.compute.manager [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1289.323583] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.189s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1289.324991] env[62508]: INFO nova.compute.claims [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1289.327453] env[62508]: INFO nova.compute.manager [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Took 18.82 seconds to build instance. [ 1289.403648] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f15d72eb-582d-438c-aa6f-ce7648356fc0 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Lock "fb7519c4-0254-4831-81f3-0eed14844f2d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.032s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1289.478179] env[62508]: DEBUG oslo_concurrency.lockutils [req-f866854f-0d5c-4563-b277-16c15e010659 req-787901c6-06cf-40f1-9460-6488fcb3dc0a service nova] Releasing lock "refresh_cache-b182d3aa-a4de-4879-ab36-2cb51472158a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1289.549080] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ebca2e-a78e-198d-9703-42861b7f115f, 'name': SearchDatastore_Task, 'duration_secs': 0.014577} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.549934] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b44f60f8-6852-49e4-ba09-9d6902a3e5fb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.556857] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Waiting for the task: (returnval){ [ 1289.556857] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522a8c22-7d68-df48-9cd6-7a5035ad7921" [ 1289.556857] env[62508]: _type = "Task" [ 1289.556857] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.568487] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522a8c22-7d68-df48-9cd6-7a5035ad7921, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.683443] env[62508]: DEBUG nova.network.neutron [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1289.769811] env[62508]: DEBUG nova.compute.manager [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1289.823612] env[62508]: DEBUG nova.compute.utils [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1289.825541] env[62508]: DEBUG nova.compute.manager [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Not allocating networking since 'none' was specified. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1289.833216] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e902f1af-d513-4359-8058-23c0f7602f63 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "ee99ff4d-9996-4cfa-b038-7b19aef27438" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.340s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1289.870920] env[62508]: DEBUG nova.compute.manager [req-49715cbe-c358-448a-9d62-c96223ed41ab req-9b281544-232e-4d98-a239-c91a3a67ca98 service nova] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Received event network-vif-plugged-7f451ea1-30cb-4db4-a325-8e1ef277ec13 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1289.871186] env[62508]: DEBUG oslo_concurrency.lockutils [req-49715cbe-c358-448a-9d62-c96223ed41ab req-9b281544-232e-4d98-a239-c91a3a67ca98 service nova] Acquiring lock "827b0887-2132-49af-bcce-cedc7237245d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1289.871377] env[62508]: DEBUG oslo_concurrency.lockutils [req-49715cbe-c358-448a-9d62-c96223ed41ab req-9b281544-232e-4d98-a239-c91a3a67ca98 service nova] Lock "827b0887-2132-49af-bcce-cedc7237245d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1289.871572] env[62508]: DEBUG oslo_concurrency.lockutils [req-49715cbe-c358-448a-9d62-c96223ed41ab req-9b281544-232e-4d98-a239-c91a3a67ca98 service nova] Lock "827b0887-2132-49af-bcce-cedc7237245d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1289.871695] env[62508]: DEBUG nova.compute.manager [req-49715cbe-c358-448a-9d62-c96223ed41ab req-9b281544-232e-4d98-a239-c91a3a67ca98 service nova] [instance: 827b0887-2132-49af-bcce-cedc7237245d] No waiting events found dispatching network-vif-plugged-7f451ea1-30cb-4db4-a325-8e1ef277ec13 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1289.871909] env[62508]: WARNING nova.compute.manager [req-49715cbe-c358-448a-9d62-c96223ed41ab req-9b281544-232e-4d98-a239-c91a3a67ca98 service nova] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Received unexpected event network-vif-plugged-7f451ea1-30cb-4db4-a325-8e1ef277ec13 for instance with vm_state building and task_state spawning. [ 1289.906781] env[62508]: DEBUG nova.compute.manager [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1290.073578] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522a8c22-7d68-df48-9cd6-7a5035ad7921, 'name': SearchDatastore_Task, 'duration_secs': 0.026489} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.076547] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1290.076833] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] b182d3aa-a4de-4879-ab36-2cb51472158a/b182d3aa-a4de-4879-ab36-2cb51472158a.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1290.077468] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5254d0f9-c0e8-4b31-aed2-4013f9efa6a4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.085730] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Waiting for the task: (returnval){ [ 1290.085730] env[62508]: value = "task-1775291" [ 1290.085730] env[62508]: _type = "Task" [ 1290.085730] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.094428] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': task-1775291, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.162034] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c2b1529-e525-47e9-b62b-378fd41e19f0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.171843] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a960c25-9684-4566-88b7-c695ce2cb7c8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.206193] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155681b6-adde-47c4-803a-ec1092a4a4ac {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.214419] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba10c20a-74c5-49df-97b7-254a78b84d06 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.227953] env[62508]: DEBUG nova.compute.provider_tree [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1290.301332] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1290.328135] env[62508]: DEBUG nova.network.neutron [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Updating instance_info_cache with network_info: [{"id": "7f451ea1-30cb-4db4-a325-8e1ef277ec13", "address": "fa:16:3e:a5:54:39", "network": {"id": "84fd8102-1459-44a5-857a-c58d55d372b2", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1822331179-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "49f6b1eaa6ba4ff2bc4b783ce5d3d0e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f451ea1-30", "ovs_interfaceid": "7f451ea1-30cb-4db4-a325-8e1ef277ec13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1290.330562] env[62508]: DEBUG nova.compute.manager [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1290.349546] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1290.429669] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1290.602920] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': task-1775291, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.704658] env[62508]: DEBUG nova.network.neutron [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Successfully updated port: f7f2eb4c-dab6-44e0-8f5e-7013ddd13683 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1290.733407] env[62508]: DEBUG nova.scheduler.client.report [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1290.831330] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Releasing lock "refresh_cache-827b0887-2132-49af-bcce-cedc7237245d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1290.831687] env[62508]: DEBUG nova.compute.manager [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Instance network_info: |[{"id": "7f451ea1-30cb-4db4-a325-8e1ef277ec13", "address": "fa:16:3e:a5:54:39", "network": {"id": 
"84fd8102-1459-44a5-857a-c58d55d372b2", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1822331179-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "49f6b1eaa6ba4ff2bc4b783ce5d3d0e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f451ea1-30", "ovs_interfaceid": "7f451ea1-30cb-4db4-a325-8e1ef277ec13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1290.832342] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:54:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ddfb706a-add1-4e16-9ac4-d20b16a1df6d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f451ea1-30cb-4db4-a325-8e1ef277ec13', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1290.842257] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Creating folder: Project (49f6b1eaa6ba4ff2bc4b783ce5d3d0e1). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1290.846981] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-30f0aa54-015c-4909-b6df-aae234c4d61a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.860317] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Created folder: Project (49f6b1eaa6ba4ff2bc4b783ce5d3d0e1) in parent group-v368536. [ 1290.860518] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Creating folder: Instances. Parent ref: group-v368549. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1290.861322] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8dbfeaf-6006-4504-a3ca-fc788ef5a3c5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.871602] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Created folder: Instances in parent group-v368549. [ 1290.871914] env[62508]: DEBUG oslo.service.loopingcall [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1290.872093] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1290.872304] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2a2ed17-18e0-415d-b08b-de476a45fa97 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.889249] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1290.894706] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1290.894706] env[62508]: value = "task-1775294" [ 1290.894706] env[62508]: _type = "Task" [ 1290.894706] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.908181] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775294, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.101184] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': task-1775291, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63211} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.103118] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] b182d3aa-a4de-4879-ab36-2cb51472158a/b182d3aa-a4de-4879-ab36-2cb51472158a.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1291.103118] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1291.103118] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf9c960a-0aa7-428a-b8e2-9bcc69a8759b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.111434] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Waiting for the task: (returnval){ [ 1291.111434] env[62508]: value = "task-1775295" [ 1291.111434] env[62508]: _type = "Task" [ 1291.111434] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.121962] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': task-1775295, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.208436] env[62508]: DEBUG oslo_concurrency.lockutils [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "refresh_cache-868cf942-f348-488d-b00a-af4c8b5efda5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1291.208436] env[62508]: DEBUG oslo_concurrency.lockutils [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquired lock "refresh_cache-868cf942-f348-488d-b00a-af4c8b5efda5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1291.208671] env[62508]: DEBUG nova.network.neutron [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1291.242593] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.918s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.242593] env[62508]: DEBUG nova.compute.manager [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1291.245164] env[62508]: DEBUG oslo_concurrency.lockutils [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.880s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.250024] env[62508]: INFO nova.compute.claims [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1291.285572] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Acquiring lock "fb7519c4-0254-4831-81f3-0eed14844f2d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.286647] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Lock "fb7519c4-0254-4831-81f3-0eed14844f2d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.287338] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Acquiring lock "fb7519c4-0254-4831-81f3-0eed14844f2d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.287338] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Lock "fb7519c4-0254-4831-81f3-0eed14844f2d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.287338] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Lock "fb7519c4-0254-4831-81f3-0eed14844f2d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.291324] env[62508]: INFO nova.compute.manager [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Terminating instance [ 1291.292796] env[62508]: DEBUG nova.compute.manager [None 
req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1291.292796] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1291.293343] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2c8830-b0e1-40e1-bd56-f2910644c9b2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.301697] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1291.301988] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ed61b0b-c483-4858-9608-d63a378caa02 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.313467] env[62508]: DEBUG oslo_vmware.api [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Waiting for the task: (returnval){ [ 1291.313467] env[62508]: value = "task-1775296" [ 1291.313467] env[62508]: _type = "Task" [ 1291.313467] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.325446] env[62508]: DEBUG oslo_vmware.api [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Task: {'id': task-1775296, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.350747] env[62508]: DEBUG nova.compute.manager [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1291.393749] env[62508]: DEBUG nova.virt.hardware [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1291.393996] env[62508]: DEBUG nova.virt.hardware [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1291.395433] env[62508]: DEBUG nova.virt.hardware [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1291.395675] env[62508]: DEBUG nova.virt.hardware [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1291.395885] env[62508]: DEBUG nova.virt.hardware [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1291.395942] env[62508]: DEBUG nova.virt.hardware [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1291.396155] env[62508]: DEBUG nova.virt.hardware [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1291.396322] env[62508]: DEBUG nova.virt.hardware [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1291.396489] env[62508]: DEBUG nova.virt.hardware [None 
req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1291.396655] env[62508]: DEBUG nova.virt.hardware [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1291.396826] env[62508]: DEBUG nova.virt.hardware [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1291.399888] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31744f5d-d290-4d49-8e7c-6081fc418b9c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.420199] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef4553d-be70-4a80-b085-2e77ed793db5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.426920] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775294, 'name': CreateVM_Task, 'duration_secs': 0.489766} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.427907] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1291.428941] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1291.429060] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1291.429414] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1291.429722] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-256abdba-a4b0-40b4-b647-8b5c7730a430 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.444364] 
env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Instance VIF info [] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1291.452950] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Creating folder: Project (6df0446a82d94c91936c6ea7440a23d5). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1291.453892] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-650145f6-b1ad-428a-b598-31f390964d8f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.457806] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Waiting for the task: (returnval){ [ 1291.457806] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5278a7e1-0635-078f-20c2-83467e91d665" [ 1291.457806] env[62508]: _type = "Task" [ 1291.457806] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.469026] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5278a7e1-0635-078f-20c2-83467e91d665, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.471586] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Created folder: Project (6df0446a82d94c91936c6ea7440a23d5) in parent group-v368536. [ 1291.471586] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Creating folder: Instances. Parent ref: group-v368552. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1291.471586] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-291144a7-456c-4a1d-8f8f-8b2e22c015b9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.479489] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Created folder: Instances in parent group-v368552. [ 1291.479786] env[62508]: DEBUG oslo.service.loopingcall [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1291.479993] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1291.480219] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-65ffc2d9-ad58-48f5-b35c-3a8aa9c0038d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.499912] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1291.499912] env[62508]: value = "task-1775299" [ 1291.499912] env[62508]: _type = "Task" [ 1291.499912] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.509487] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775299, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.586305] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquiring lock "b911f25d-711b-411e-bb2d-2e59386ff2ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.586431] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lock "b911f25d-711b-411e-bb2d-2e59386ff2ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.626247] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': task-1775295, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07004} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.626247] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1291.626247] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be42ee5-4c12-4e26-8e76-9678c2b3fa9c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.647613] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] b182d3aa-a4de-4879-ab36-2cb51472158a/b182d3aa-a4de-4879-ab36-2cb51472158a.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1291.647858] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b34b7b25-a567-4483-a1cd-cab8c4bbd2e2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.672885] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Waiting for the task: (returnval){ [ 1291.672885] env[62508]: value = "task-1775300" [ 1291.672885] env[62508]: _type = "Task" [ 1291.672885] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.685470] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': task-1775300, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.742752] env[62508]: DEBUG nova.network.neutron [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Successfully updated port: 69b55107-fb30-4e96-ae5d-062af1ec4850 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1291.751613] env[62508]: DEBUG nova.compute.utils [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1291.756269] env[62508]: DEBUG nova.compute.manager [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1291.756668] env[62508]: DEBUG nova.network.neutron [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1291.764345] env[62508]: DEBUG nova.network.neutron [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1291.826227] env[62508]: DEBUG oslo_vmware.api [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Task: {'id': task-1775296, 'name': PowerOffVM_Task, 'duration_secs': 0.404213} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.828030] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1291.828030] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1291.828030] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e52e4c77-8628-4ad3-88d2-e9b3f3922a71 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.884676] env[62508]: INFO nova.compute.manager [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Rebuilding instance [ 1291.909953] env[62508]: DEBUG nova.policy [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4db5bde91fa84db9aec4e0a6873be049', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd4a984ff530a41058c97abb746a4efee', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1291.917593] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Unregistered the VM {{(pid=62508) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1291.918361] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1291.918361] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Deleting the datastore file [datastore1] fb7519c4-0254-4831-81f3-0eed14844f2d {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1291.921799] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8aaf2068-fb7e-4d4b-9436-fa63dddd1350 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.930138] env[62508]: DEBUG oslo_vmware.api [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Waiting for the task: (returnval){ [ 1291.930138] env[62508]: value = "task-1775302" [ 1291.930138] env[62508]: _type = "Task" [ 1291.930138] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.940518] env[62508]: DEBUG oslo_vmware.api [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Task: {'id': task-1775302, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.955441] env[62508]: DEBUG nova.compute.manager [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1291.957248] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242a8488-740c-42da-8302-9c60285b7f4f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.979144] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5278a7e1-0635-078f-20c2-83467e91d665, 'name': SearchDatastore_Task, 'duration_secs': 0.021684} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.979817] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1291.982411] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1291.982411] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1291.982411] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1291.982411] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1291.982411] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55ea3393-a652-4dd1-8f6e-511f8871d8b9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.993451] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1291.993917] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1291.994432] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8854227d-fefd-4fe0-b45d-9a7ff5d3d5db {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.002238] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Waiting for the task: (returnval){ [ 1292.002238] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520c6eca-9708-7bee-c39a-612337b0c903" [ 1292.002238] env[62508]: _type = "Task" [ 1292.002238] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.019126] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775299, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.024919] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520c6eca-9708-7bee-c39a-612337b0c903, 'name': SearchDatastore_Task, 'duration_secs': 0.01426} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.024919] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9a87aca-0cc0-447a-b2e8-d7f17b498ea7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.033201] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Waiting for the task: (returnval){ [ 1292.033201] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5227908b-22ac-57fd-99af-318767bfbf93" [ 1292.033201] env[62508]: _type = "Task" [ 1292.033201] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.040172] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5227908b-22ac-57fd-99af-318767bfbf93, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.155842] env[62508]: DEBUG nova.network.neutron [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Updating instance_info_cache with network_info: [{"id": "f7f2eb4c-dab6-44e0-8f5e-7013ddd13683", "address": "fa:16:3e:1f:3b:14", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7f2eb4c-da", "ovs_interfaceid": "f7f2eb4c-dab6-44e0-8f5e-7013ddd13683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1292.183402] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': task-1775300, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.248017] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Acquiring lock "refresh_cache-d32a3a5d-17d0-4a79-b76a-371cdd170ee0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1292.248017] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Acquired lock "refresh_cache-d32a3a5d-17d0-4a79-b76a-371cdd170ee0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1292.248017] env[62508]: DEBUG nova.network.neutron [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1292.256819] env[62508]: DEBUG nova.compute.manager [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1292.442528] env[62508]: DEBUG oslo_vmware.api [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Task: {'id': task-1775302, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.216392} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.442528] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1292.442528] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1292.442528] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1292.442528] env[62508]: INFO nova.compute.manager [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1292.442913] env[62508]: DEBUG oslo.service.loopingcall [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1292.442913] env[62508]: DEBUG nova.compute.manager [-] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1292.442913] env[62508]: DEBUG nova.network.neutron [-] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1292.482425] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1292.482721] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-262c0c42-97a6-4281-9cfa-c918d92d5610 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.494819] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1292.494819] env[62508]: value = "task-1775303" [ 1292.494819] env[62508]: _type = "Task" [ 1292.494819] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.507839] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775303, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.519757] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775299, 'name': CreateVM_Task, 'duration_secs': 0.609897} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.520093] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1292.521867] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1292.521867] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1292.521867] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1292.521867] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-567314e0-a537-4832-b464-43fd7d626dde {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.527953] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Waiting for the task: (returnval){ [ 1292.527953] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52601c5e-00a6-013d-7c9b-3b97f679984c" [ 1292.527953] env[62508]: _type = "Task" [ 1292.527953] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.542378] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52601c5e-00a6-013d-7c9b-3b97f679984c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.546988] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5227908b-22ac-57fd-99af-318767bfbf93, 'name': SearchDatastore_Task, 'duration_secs': 0.0103} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.549205] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1292.549499] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 827b0887-2132-49af-bcce-cedc7237245d/827b0887-2132-49af-bcce-cedc7237245d.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1292.550066] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d883a18b-8a56-4344-be75-ec2defd09566 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.558773] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Waiting for the task: (returnval){ [ 1292.558773] env[62508]: value = "task-1775304" [ 1292.558773] env[62508]: _type = "Task" [ 1292.558773] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.569190] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': task-1775304, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.617420] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21aa795e-9663-4d0d-89cd-a5f18e95e6d6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.625791] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf414e7-4a73-4b13-8f85-7a798a51ba6c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.671315] env[62508]: DEBUG oslo_concurrency.lockutils [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Releasing lock "refresh_cache-868cf942-f348-488d-b00a-af4c8b5efda5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1292.671646] env[62508]: DEBUG nova.compute.manager [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Instance network_info: |[{"id": "f7f2eb4c-dab6-44e0-8f5e-7013ddd13683", "address": "fa:16:3e:1f:3b:14", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7f2eb4c-da", "ovs_interfaceid": "f7f2eb4c-dab6-44e0-8f5e-7013ddd13683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1292.672882] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:3b:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7f2eb4c-dab6-44e0-8f5e-7013ddd13683', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1292.680525] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Creating folder: Project (bf3fdb58653a47149b5ae7316424d235). Parent ref: group-v368536. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1292.681356] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b622ef-5ca9-49c9-983c-0ead3b69e5ed {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.684518] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b24844a1-122e-4422-9226-1df8c8d60886 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.695404] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7380caf-8eee-484e-ae04-d493dc45f625 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.702067] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Created folder: Project (bf3fdb58653a47149b5ae7316424d235) in parent group-v368536. [ 1292.702266] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Creating folder: Instances. Parent ref: group-v368555. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1292.702826] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-772c4103-ff34-4667-946c-89e804dc5590 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.713168] env[62508]: DEBUG nova.compute.provider_tree [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1292.717834] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': task-1775300, 'name': ReconfigVM_Task, 'duration_secs': 0.556731} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.718357] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Reconfigured VM instance instance-00000004 to attach disk [datastore1] b182d3aa-a4de-4879-ab36-2cb51472158a/b182d3aa-a4de-4879-ab36-2cb51472158a.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1292.718954] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-598e81fe-a2fb-4391-9fda-9d3024ed5709 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.724944] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Waiting for the task: (returnval){ [ 1292.724944] env[62508]: value = "task-1775307" [ 1292.724944] env[62508]: _type = "Task" [ 1292.724944] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.726819] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Created folder: Instances in parent group-v368555. [ 1292.727054] env[62508]: DEBUG oslo.service.loopingcall [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1292.730809] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1292.730809] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f43f3d3-65df-4f9a-ad2b-052611ea0b62 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.752774] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': task-1775307, 'name': Rename_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.754143] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1292.754143] env[62508]: value = "task-1775308" [ 1292.754143] env[62508]: _type = "Task" [ 1292.754143] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.762799] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775308, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.963333] env[62508]: DEBUG nova.network.neutron [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1293.007025] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775303, 'name': PowerOffVM_Task, 'duration_secs': 0.153925} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.007352] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1293.007573] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1293.008414] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633b7a29-9c85-4c35-98ab-a9049b8ad57c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.020238] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1293.024756] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c48ce3f5-9d45-443d-88b8-87ae1327f615 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.045150] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52601c5e-00a6-013d-7c9b-3b97f679984c, 'name': SearchDatastore_Task, 'duration_secs': 0.014373} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.046097] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1293.046097] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1293.046434] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1293.046653] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.046860] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1293.047245] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ede6872-19e3-496a-a542-a6cd8ac8025c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.052063] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1293.052347] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1293.052468] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Deleting the datastore file [datastore1] 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1293.052717] env[62508]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5130d8b4-fe9d-4198-b20c-ce3d1278ba17 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.060182] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1293.060182] env[62508]: value = "task-1775310" [ 1293.060182] env[62508]: _type = "Task" [ 1293.060182] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.069313] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1293.069702] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1293.075019] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24195d81-ac28-45fb-b809-2d2b933ed6a2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.078029] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': task-1775304, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.081559] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775310, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.084873] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Waiting for the task: (returnval){ [ 1293.084873] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f36292-02e7-1d43-9608-cafca986ad1c" [ 1293.084873] env[62508]: _type = "Task" [ 1293.084873] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.094325] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f36292-02e7-1d43-9608-cafca986ad1c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.221367] env[62508]: DEBUG nova.scheduler.client.report [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1293.238983] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': task-1775307, 'name': Rename_Task, 'duration_secs': 0.315711} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.242008] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1293.242008] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9dad7419-0b10-404e-ae6a-834f9875e6ae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.246466] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Waiting for the task: (returnval){ [ 1293.246466] env[62508]: value = "task-1775311" [ 1293.246466] env[62508]: _type = "Task" [ 1293.246466] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.256296] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': task-1775311, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.272964] env[62508]: DEBUG nova.compute.manager [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1293.274995] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775308, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.281734] env[62508]: DEBUG nova.network.neutron [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Successfully created port: 173aba5f-fabb-4f48-899e-9fc4716084fa {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1293.326177] env[62508]: DEBUG nova.virt.hardware [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1293.327200] env[62508]: DEBUG nova.virt.hardware [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1293.328721] env[62508]: DEBUG nova.virt.hardware [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1293.329036] env[62508]: DEBUG nova.virt.hardware [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1293.330498] env[62508]: DEBUG nova.virt.hardware [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1293.330498] env[62508]: DEBUG nova.virt.hardware [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1293.330498] env[62508]: DEBUG nova.virt.hardware [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1293.330498] env[62508]: DEBUG nova.virt.hardware [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1293.330498] env[62508]: DEBUG nova.virt.hardware [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1293.331093] env[62508]: DEBUG nova.virt.hardware [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1293.331093] env[62508]: DEBUG nova.virt.hardware [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1293.331811] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b81ba3-796b-4f31-b230-a64f2a0bda30 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.346235] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-522487d6-2e1b-489f-a071-6f88fb995320 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.408127] env[62508]: DEBUG nova.network.neutron [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Updating instance_info_cache with network_info: [{"id": "69b55107-fb30-4e96-ae5d-062af1ec4850", "address": "fa:16:3e:75:a6:d6", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.70", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69b55107-fb", "ovs_interfaceid": "69b55107-fb30-4e96-ae5d-062af1ec4850", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.477626] env[62508]: DEBUG nova.compute.manager [req-bf38e226-bd5d-4013-a2a9-4d80d1aecf23 req-88ef9b58-cce2-4f55-aa6d-27d9d4db981b service nova] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Received event network-vif-plugged-69b55107-fb30-4e96-ae5d-062af1ec4850 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1293.477626] env[62508]: DEBUG oslo_concurrency.lockutils [req-bf38e226-bd5d-4013-a2a9-4d80d1aecf23 req-88ef9b58-cce2-4f55-aa6d-27d9d4db981b service nova] Acquiring lock "d32a3a5d-17d0-4a79-b76a-371cdd170ee0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1293.479927] env[62508]: DEBUG oslo_concurrency.lockutils [req-bf38e226-bd5d-4013-a2a9-4d80d1aecf23 req-88ef9b58-cce2-4f55-aa6d-27d9d4db981b service nova] Lock "d32a3a5d-17d0-4a79-b76a-371cdd170ee0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1293.479927] env[62508]: DEBUG oslo_concurrency.lockutils [req-bf38e226-bd5d-4013-a2a9-4d80d1aecf23 req-88ef9b58-cce2-4f55-aa6d-27d9d4db981b service nova] Lock "d32a3a5d-17d0-4a79-b76a-371cdd170ee0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1293.479927] env[62508]: DEBUG nova.compute.manager [req-bf38e226-bd5d-4013-a2a9-4d80d1aecf23 req-88ef9b58-cce2-4f55-aa6d-27d9d4db981b service nova] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] No waiting events found dispatching network-vif-plugged-69b55107-fb30-4e96-ae5d-062af1ec4850 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1293.480127] env[62508]: WARNING nova.compute.manager [req-bf38e226-bd5d-4013-a2a9-4d80d1aecf23 req-88ef9b58-cce2-4f55-aa6d-27d9d4db981b service nova] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Received unexpected event network-vif-plugged-69b55107-fb30-4e96-ae5d-062af1ec4850 for instance with vm_state building and task_state spawning. [ 1293.576876] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': task-1775304, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.683786} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.580948] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 827b0887-2132-49af-bcce-cedc7237245d/827b0887-2132-49af-bcce-cedc7237245d.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1293.581239] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1293.581423] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.321496} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.581998] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3b9c2b93-f3b2-460f-bd1c-e313f8d3af08 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.586314] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1293.586314] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1293.586314] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1293.601965] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Waiting for the task: (returnval){ [ 1293.601965] env[62508]: value = "task-1775312" [ 1293.601965] env[62508]: _type = "Task" [ 1293.601965] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.609386] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f36292-02e7-1d43-9608-cafca986ad1c, 'name': SearchDatastore_Task, 'duration_secs': 0.057952} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.610693] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2113279-df51-4302-b6d7-8505e97bddd1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.619162] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': task-1775312, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.623120] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Waiting for the task: (returnval){ [ 1293.623120] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5285e587-19a8-98b7-d152-b0c4e567cea7" [ 1293.623120] env[62508]: _type = "Task" [ 1293.623120] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.634578] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5285e587-19a8-98b7-d152-b0c4e567cea7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.730075] env[62508]: DEBUG oslo_concurrency.lockutils [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.483s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1293.730075] env[62508]: DEBUG nova.compute.manager [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1293.731191] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 6.817s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1293.760142] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': task-1775311, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.771451] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775308, 'name': CreateVM_Task, 'duration_secs': 0.740667} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.771683] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1293.772416] env[62508]: DEBUG oslo_concurrency.lockutils [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1293.772830] env[62508]: DEBUG oslo_concurrency.lockutils [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.773184] env[62508]: DEBUG oslo_concurrency.lockutils [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1293.773588] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e068f4ad-c1d7-4971-bb97-705ef0a49553 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.778131] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1293.778131] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e4f9e1-879e-69ef-6268-0153d44c6e33" [ 1293.778131] env[62508]: _type = "Task" [ 1293.778131] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.790808] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e4f9e1-879e-69ef-6268-0153d44c6e33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.841815] env[62508]: DEBUG nova.network.neutron [-] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.921150] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Releasing lock "refresh_cache-d32a3a5d-17d0-4a79-b76a-371cdd170ee0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1293.921150] env[62508]: DEBUG nova.compute.manager [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Instance network_info: |[{"id": "69b55107-fb30-4e96-ae5d-062af1ec4850", "address": "fa:16:3e:75:a6:d6", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.70", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69b55107-fb", "ovs_interfaceid": "69b55107-fb30-4e96-ae5d-062af1ec4850", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1293.921288] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:a6:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '69b55107-fb30-4e96-ae5d-062af1ec4850', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1293.927138] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] 
Creating folder: Project (3c285de053cc4d5f80df78c29ae7eaf7). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1293.927524] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce9e7864-7c72-4c89-99ff-d83082a1151f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.937524] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Created folder: Project (3c285de053cc4d5f80df78c29ae7eaf7) in parent group-v368536. [ 1293.937746] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Creating folder: Instances. Parent ref: group-v368558. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1293.937978] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-029b484e-af8a-4d28-83ba-4e354471828c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.947426] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Created folder: Instances in parent group-v368558. [ 1293.948040] env[62508]: DEBUG oslo.service.loopingcall [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1293.948566] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1293.949084] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-744b1be9-1105-4d91-b89e-03e4421a7b59 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.977697] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1293.977697] env[62508]: value = "task-1775315" [ 1293.977697] env[62508]: _type = "Task" [ 1293.977697] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.987866] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775315, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.079574] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Acquiring lock "ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1294.079946] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Lock "ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1294.115500] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': task-1775312, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063186} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.115783] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1294.116680] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6171cdd4-518f-44fc-acbc-d30dcaced608 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.144754] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 827b0887-2132-49af-bcce-cedc7237245d/827b0887-2132-49af-bcce-cedc7237245d.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1294.149018] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8d9f1c1-938a-4a33-9ed9-80b5a155d228 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.171527] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Waiting for the task: (returnval){ [ 1294.171527] env[62508]: value = "task-1775316" [ 1294.171527] env[62508]: _type = "Task" [ 1294.171527] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.176423] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5285e587-19a8-98b7-d152-b0c4e567cea7, 'name': SearchDatastore_Task, 'duration_secs': 0.010779} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.179768] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1294.180058] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 891fac54-2ec4-4d47-8535-a33bd9dfb804/891fac54-2ec4-4d47-8535-a33bd9dfb804.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1294.180315] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e589e16-f419-4be8-986e-ecde8bafa48e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.188497] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': task-1775316, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.190648] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Waiting for the task: (returnval){ [ 1294.190648] env[62508]: value = "task-1775317" [ 1294.190648] env[62508]: _type = "Task" [ 1294.190648] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.200931] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': task-1775317, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.236440] env[62508]: DEBUG nova.compute.utils [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1294.237998] env[62508]: DEBUG nova.compute.manager [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Not allocating networking since 'none' was specified. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1294.258104] env[62508]: DEBUG oslo_vmware.api [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': task-1775311, 'name': PowerOnVM_Task, 'duration_secs': 0.953254} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.258104] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1294.258310] env[62508]: INFO nova.compute.manager [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Took 12.54 seconds to spawn the instance on the hypervisor. [ 1294.258484] env[62508]: DEBUG nova.compute.manager [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1294.259292] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c42030d-8776-4b60-bd22-0d951d776b7e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.292799] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e4f9e1-879e-69ef-6268-0153d44c6e33, 'name': SearchDatastore_Task, 'duration_secs': 0.019234} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.293125] env[62508]: DEBUG oslo_concurrency.lockutils [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1294.293360] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1294.293665] env[62508]: DEBUG oslo_concurrency.lockutils [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1294.293824] env[62508]: DEBUG oslo_concurrency.lockutils [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1294.294016] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1294.294930] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b30da2ae-bfa1-4dd4-992e-b157f330c3c0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.308016] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1294.308423] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1294.309393] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11373084-1b04-4341-902a-67438a90d5cc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.318900] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1294.318900] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521303bc-f9fb-a89d-efe9-5613a5d3bcaa" [ 1294.318900] env[62508]: _type = "Task" [ 1294.318900] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.328590] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521303bc-f9fb-a89d-efe9-5613a5d3bcaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.345701] env[62508]: INFO nova.compute.manager [-] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Took 1.90 seconds to deallocate network for instance. [ 1294.474498] env[62508]: DEBUG nova.compute.manager [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Received event network-changed-7f451ea1-30cb-4db4-a325-8e1ef277ec13 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1294.474788] env[62508]: DEBUG nova.compute.manager [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Refreshing instance network info cache due to event network-changed-7f451ea1-30cb-4db4-a325-8e1ef277ec13. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1294.475143] env[62508]: DEBUG oslo_concurrency.lockutils [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] Acquiring lock "refresh_cache-827b0887-2132-49af-bcce-cedc7237245d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1294.475243] env[62508]: DEBUG oslo_concurrency.lockutils [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] Acquired lock "refresh_cache-827b0887-2132-49af-bcce-cedc7237245d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1294.475355] env[62508]: DEBUG nova.network.neutron [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Refreshing network info cache for port 7f451ea1-30cb-4db4-a325-8e1ef277ec13 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1294.489675] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775315, 'name': CreateVM_Task, 'duration_secs': 0.507328} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.489945] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1294.490449] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1294.490931] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1294.490994] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1294.492548] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36835fe4-734b-4315-baae-545e70fb7cdd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.498809] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Waiting for the task: (returnval){ [ 1294.498809] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5256c589-dbb5-d2d5-0521-455eee6058bb" [ 1294.498809] env[62508]: _type = "Task" [ 1294.498809] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.507340] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5256c589-dbb5-d2d5-0521-455eee6058bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.625377] env[62508]: DEBUG nova.virt.hardware [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1294.625559] env[62508]: DEBUG nova.virt.hardware [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1294.625715] env[62508]: DEBUG nova.virt.hardware [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1294.625898] env[62508]: DEBUG nova.virt.hardware [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1294.626061] env[62508]: DEBUG nova.virt.hardware [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1294.626272] env[62508]: DEBUG nova.virt.hardware [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1294.626490] env[62508]: DEBUG nova.virt.hardware [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1294.626647] env[62508]: DEBUG nova.virt.hardware [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1294.626812] env[62508]: DEBUG nova.virt.hardware [None 
req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1294.627009] env[62508]: DEBUG nova.virt.hardware [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1294.627205] env[62508]: DEBUG nova.virt.hardware [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1294.629170] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-434a1b89-03fc-40e2-86b2-d3c525d73221 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.638453] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951e1e50-41ca-4012-9444-70525b22a1fb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.659320] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Instance VIF info [] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1294.665682] env[62508]: DEBUG oslo.service.loopingcall [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1294.666068] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1294.666356] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-678678f8-4ca2-4d42-99cf-8c24c2f2d7e3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.691595] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': task-1775316, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.700041] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1294.700041] env[62508]: value = "task-1775318" [ 1294.700041] env[62508]: _type = "Task" [ 1294.700041] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.709727] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': task-1775317, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.716969] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775318, 'name': CreateVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.745578] env[62508]: DEBUG nova.compute.manager [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1294.788111] env[62508]: INFO nova.compute.manager [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Took 22.22 seconds to build instance. [ 1294.791810] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance ee99ff4d-9996-4cfa-b038-7b19aef27438 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1294.794022] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance fb7519c4-0254-4831-81f3-0eed14844f2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1294.794022] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1294.794022] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance b182d3aa-a4de-4879-ab36-2cb51472158a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1294.794022] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 827b0887-2132-49af-bcce-cedc7237245d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1294.794216] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 868cf942-f348-488d-b00a-af4c8b5efda5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1294.794216] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance d32a3a5d-17d0-4a79-b76a-371cdd170ee0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1294.794216] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 891fac54-2ec4-4d47-8535-a33bd9dfb804 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1294.794216] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 7339c22a-05c9-4ddd-93df-0326cbe96ca4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1294.794329] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance fa00f4fe-3bb2-4e17-be22-8a1fda502f65 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1294.830080] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521303bc-f9fb-a89d-efe9-5613a5d3bcaa, 'name': SearchDatastore_Task, 'duration_secs': 0.016346} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.832187] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f71740f4-63e8-4f07-b7c5-202514839cf4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.841817] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1294.841817] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a62d5e-81b8-865c-0b4d-d520eab7c2e5" [ 1294.841817] env[62508]: _type = "Task" [ 1294.841817] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.851135] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a62d5e-81b8-865c-0b4d-d520eab7c2e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.858735] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1295.016530] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5256c589-dbb5-d2d5-0521-455eee6058bb, 'name': SearchDatastore_Task, 'duration_secs': 0.035931} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.016849] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1295.017088] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1295.017297] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1295.193330] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': task-1775316, 'name': ReconfigVM_Task, 'duration_secs': 0.795896} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.197065] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 827b0887-2132-49af-bcce-cedc7237245d/827b0887-2132-49af-bcce-cedc7237245d.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1295.197857] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-62d8d4fb-50a4-45fe-8664-9c99737d3c71 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.207688] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Waiting for the task: (returnval){ [ 1295.207688] env[62508]: value = "task-1775319" [ 1295.207688] env[62508]: _type = "Task" [ 1295.207688] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.208746] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': task-1775317, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.69384} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.208746] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 891fac54-2ec4-4d47-8535-a33bd9dfb804/891fac54-2ec4-4d47-8535-a33bd9dfb804.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1295.208746] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1295.214127] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-95ab329b-7480-4a41-a9a9-0fff6de25386 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.216821] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775318, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.222787] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': task-1775319, 'name': Rename_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.224968] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Waiting for the task: (returnval){ [ 1295.224968] env[62508]: value = "task-1775320" [ 1295.224968] env[62508]: _type = "Task" [ 1295.224968] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.238881] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': task-1775320, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.284553] env[62508]: DEBUG nova.network.neutron [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Updated VIF entry in instance network info cache for port 7f451ea1-30cb-4db4-a325-8e1ef277ec13. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1295.284553] env[62508]: DEBUG nova.network.neutron [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Updating instance_info_cache with network_info: [{"id": "7f451ea1-30cb-4db4-a325-8e1ef277ec13", "address": "fa:16:3e:a5:54:39", "network": {"id": "84fd8102-1459-44a5-857a-c58d55d372b2", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1822331179-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "49f6b1eaa6ba4ff2bc4b783ce5d3d0e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f451ea1-30", "ovs_interfaceid": "7f451ea1-30cb-4db4-a325-8e1ef277ec13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1295.290337] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c311f9a3-18c3-447c-a94d-454fc93020d5 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Lock "b182d3aa-a4de-4879-ab36-2cb51472158a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.735s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1295.297263] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 46a524e2-93b0-4726-812f-98e08b6ba0b4 has been scheduled to this compute host, the scheduler 
has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1295.352931] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a62d5e-81b8-865c-0b4d-d520eab7c2e5, 'name': SearchDatastore_Task, 'duration_secs': 0.02648} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.353065] env[62508]: DEBUG oslo_concurrency.lockutils [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1295.353312] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 868cf942-f348-488d-b00a-af4c8b5efda5/868cf942-f348-488d-b00a-af4c8b5efda5.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1295.353580] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1295.353765] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1295.353974] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97ac31ce-21a5-40e9-a5e1-de7e31e4b275 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.356301] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52ab4c43-e062-4b10-81d3-4c0708c315b6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.366217] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1295.366217] env[62508]: value = "task-1775321" [ 1295.366217] env[62508]: _type = "Task" [ 1295.366217] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.370611] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1295.370907] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1295.372540] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-628374fe-723c-48fe-938f-268866fc7c8c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.377714] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775321, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.381956] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Waiting for the task: (returnval){ [ 1295.381956] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529339d8-713b-9e03-50e0-89d84c6c6e92" [ 1295.381956] env[62508]: _type = "Task" [ 1295.381956] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.388717] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529339d8-713b-9e03-50e0-89d84c6c6e92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.711422] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775318, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.722770] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': task-1775319, 'name': Rename_Task, 'duration_secs': 0.432005} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.723351] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1295.723668] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c93078aa-a484-4286-887a-ebe18b9fc508 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.735919] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': task-1775320, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.255804} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.737363] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1295.737973] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Waiting for the task: (returnval){ [ 1295.737973] env[62508]: value = "task-1775322" [ 1295.737973] env[62508]: _type = "Task" [ 1295.737973] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.739142] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3cbb5c-2a05-488f-b84b-c79a8a4bc6d1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.754285] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': task-1775322, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.774410] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] 891fac54-2ec4-4d47-8535-a33bd9dfb804/891fac54-2ec4-4d47-8535-a33bd9dfb804.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1295.774410] env[62508]: DEBUG nova.compute.manager [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1295.776663] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7de9eb5-3339-4d76-80a0-5bed9934a48e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.795988] env[62508]: DEBUG oslo_concurrency.lockutils [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] Releasing lock "refresh_cache-827b0887-2132-49af-bcce-cedc7237245d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1295.796427] env[62508]: DEBUG nova.compute.manager [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Received event network-vif-plugged-f7f2eb4c-dab6-44e0-8f5e-7013ddd13683 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1295.797751] env[62508]: DEBUG oslo_concurrency.lockutils [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] Acquiring lock "868cf942-f348-488d-b00a-af4c8b5efda5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1295.800405] env[62508]: DEBUG oslo_concurrency.lockutils [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] Lock "868cf942-f348-488d-b00a-af4c8b5efda5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1295.800405] env[62508]: DEBUG oslo_concurrency.lockutils [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] Lock "868cf942-f348-488d-b00a-af4c8b5efda5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1295.800405] env[62508]: DEBUG nova.compute.manager [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] No waiting events found dispatching network-vif-plugged-f7f2eb4c-dab6-44e0-8f5e-7013ddd13683 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1295.800405] env[62508]: WARNING nova.compute.manager [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Received unexpected event network-vif-plugged-f7f2eb4c-dab6-44e0-8f5e-7013ddd13683 for instance with vm_state building and task_state spawning. 
[ 1295.800405] env[62508]: DEBUG nova.compute.manager [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Received event network-changed-f7f2eb4c-dab6-44e0-8f5e-7013ddd13683 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1295.800579] env[62508]: DEBUG nova.compute.manager [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Refreshing instance network info cache due to event network-changed-f7f2eb4c-dab6-44e0-8f5e-7013ddd13683. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1295.800579] env[62508]: DEBUG oslo_concurrency.lockutils [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] Acquiring lock "refresh_cache-868cf942-f348-488d-b00a-af4c8b5efda5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1295.800579] env[62508]: DEBUG oslo_concurrency.lockutils [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] Acquired lock "refresh_cache-868cf942-f348-488d-b00a-af4c8b5efda5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1295.800579] env[62508]: DEBUG nova.network.neutron [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Refreshing network info cache for port f7f2eb4c-dab6-44e0-8f5e-7013ddd13683 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1295.801827] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1295.809218] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance e2d4c71b-1164-4c7d-9ffb-7f5489f92d32 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1295.818151] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Waiting for the task: (returnval){ [ 1295.818151] env[62508]: value = "task-1775323" [ 1295.818151] env[62508]: _type = "Task" [ 1295.818151] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.830040] env[62508]: DEBUG nova.virt.hardware [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1295.832616] env[62508]: DEBUG nova.virt.hardware [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1295.832616] env[62508]: DEBUG nova.virt.hardware [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1295.832616] env[62508]: DEBUG nova.virt.hardware [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1295.832616] env[62508]: DEBUG nova.virt.hardware [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1295.832616] env[62508]: DEBUG nova.virt.hardware [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1295.832845] env[62508]: DEBUG nova.virt.hardware [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1295.832845] env[62508]: DEBUG nova.virt.hardware [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1295.832845] env[62508]: DEBUG nova.virt.hardware [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1295.832845] env[62508]: DEBUG nova.virt.hardware [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1295.832845] env[62508]: DEBUG nova.virt.hardware [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1295.834041] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d670e3d-da5c-461b-9566-556e9c58eb3b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.847273] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1c32c7-4f96-452b-98b1-7e1e7be815ef {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.852259] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': task-1775323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.865218] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Instance VIF info [] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1295.871565] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Creating folder: Project (76a51c36f66948099d5e62fca02c5335). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1295.872299] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ea384d5-ce97-453c-be12-55267b38b76a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.888159] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775321, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.892448] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Created folder: Project (76a51c36f66948099d5e62fca02c5335) in parent group-v368536. [ 1295.892448] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Creating folder: Instances. Parent ref: group-v368562. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1295.897237] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb31dec5-c936-4289-931f-0e31d6267b44 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.898309] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529339d8-713b-9e03-50e0-89d84c6c6e92, 'name': SearchDatastore_Task, 'duration_secs': 0.026962} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.899713] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41fbb15e-1f04-419f-8118-3e37a23dbd87 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.910639] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Waiting for the task: (returnval){ [ 1295.910639] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524e596f-4213-b7dd-ea1f-636678f81cf7" [ 1295.910639] env[62508]: _type = "Task" [ 1295.910639] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.910639] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Created folder: Instances in parent group-v368562. [ 1295.910639] env[62508]: DEBUG oslo.service.loopingcall [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1295.913331] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1295.914235] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6f24705-773c-400c-a0ab-3dbdeec0e679 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.933493] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524e596f-4213-b7dd-ea1f-636678f81cf7, 'name': SearchDatastore_Task, 'duration_secs': 0.021948} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.934907] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1295.935201] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] d32a3a5d-17d0-4a79-b76a-371cdd170ee0/d32a3a5d-17d0-4a79-b76a-371cdd170ee0.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1295.936142] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1295.936142] env[62508]: value = "task-1775326" [ 1295.936142] env[62508]: _type = "Task" [ 1295.936142] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.936142] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-66076e05-a454-4fa4-af8d-6f25d92cc437 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.947235] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775326, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.948888] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Waiting for the task: (returnval){ [ 1295.948888] env[62508]: value = "task-1775327" [ 1295.948888] env[62508]: _type = "Task" [ 1295.948888] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.958500] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': task-1775327, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.214739] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775318, 'name': CreateVM_Task, 'duration_secs': 1.408778} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.215058] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1296.215468] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1296.215656] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.216112] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1296.216721] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef8a827d-ad7e-4279-908f-70e00817cf77 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.224586] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1296.224586] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52fad624-507e-3d98-4f72-40be075a3999" [ 1296.224586] env[62508]: _type = "Task" [ 1296.224586] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.236540] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52fad624-507e-3d98-4f72-40be075a3999, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.262572] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': task-1775322, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.315683] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 575ea3dc-850d-4078-8678-41b3c40a4c27 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1296.340479] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': task-1775323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.358615] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1296.368594] env[62508]: DEBUG nova.network.neutron [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Successfully updated port: 173aba5f-fabb-4f48-899e-9fc4716084fa {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1296.389348] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775321, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.984689} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.389348] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 868cf942-f348-488d-b00a-af4c8b5efda5/868cf942-f348-488d-b00a-af4c8b5efda5.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1296.389684] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1296.389801] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c3bb9133-5c87-4d6a-8e40-cc3ad6f793da {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.402862] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1296.402862] env[62508]: value = "task-1775328" [ 1296.402862] env[62508]: _type = "Task" [ 1296.402862] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.417154] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775328, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.453183] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775326, 'name': CreateVM_Task, 'duration_secs': 0.43497} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.457045] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1296.457497] env[62508]: DEBUG oslo_concurrency.lockutils [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1296.465604] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': task-1775327, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.739703] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52fad624-507e-3d98-4f72-40be075a3999, 'name': SearchDatastore_Task, 'duration_secs': 0.05898} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.740227] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1296.740346] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1296.740768] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1296.740768] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.741042] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1296.741261] env[62508]: DEBUG oslo_concurrency.lockutils [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.741574] env[62508]: DEBUG oslo_concurrency.lockutils [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1296.741804] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9d28145-eadd-4a3d-8b2f-aff48ded9d75 
{{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.744202] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-934c49d8-ac7c-40af-aafa-94fdcd00eab0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.757412] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': task-1775322, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.760167] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Waiting for the task: (returnval){ [ 1296.760167] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5246fda5-03a3-c59c-158b-6c8e4f059da1" [ 1296.760167] env[62508]: _type = "Task" [ 1296.760167] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.765476] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1296.765580] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1296.767460] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c560b58-cf78-4296-93bc-1c605b332bec {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.776270] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5246fda5-03a3-c59c-158b-6c8e4f059da1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.782347] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1296.782347] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b602ea-d6d6-7f61-19a4-260f7f154eab" [ 1296.782347] env[62508]: _type = "Task" [ 1296.782347] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.791390] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b602ea-d6d6-7f61-19a4-260f7f154eab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.831139] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 192995e7-82f5-41be-990d-d91b93f981e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1296.844691] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': task-1775323, 'name': ReconfigVM_Task, 'duration_secs': 0.635963} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.845037] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Reconfigured VM instance instance-00000008 to attach disk [datastore1] 891fac54-2ec4-4d47-8535-a33bd9dfb804/891fac54-2ec4-4d47-8535-a33bd9dfb804.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1296.845749] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e99fb3a-4476-43c9-bb2b-c7c6a294344c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.854365] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Waiting for the task: (returnval){ [ 1296.854365] env[62508]: value = "task-1775329" [ 1296.854365] env[62508]: _type = "Task" [ 1296.854365] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.870013] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': task-1775329, 'name': Rename_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.875867] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Acquiring lock "refresh_cache-7339c22a-05c9-4ddd-93df-0326cbe96ca4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1296.875867] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Acquired lock "refresh_cache-7339c22a-05c9-4ddd-93df-0326cbe96ca4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.875867] env[62508]: DEBUG nova.network.neutron [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1296.877492] env[62508]: DEBUG nova.network.neutron [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Updated VIF entry in instance network info cache for port f7f2eb4c-dab6-44e0-8f5e-7013ddd13683. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1296.877492] env[62508]: DEBUG nova.network.neutron [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Updating instance_info_cache with network_info: [{"id": "f7f2eb4c-dab6-44e0-8f5e-7013ddd13683", "address": "fa:16:3e:1f:3b:14", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7f2eb4c-da", "ovs_interfaceid": "f7f2eb4c-dab6-44e0-8f5e-7013ddd13683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1296.913997] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775328, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105273} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.914561] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1296.915222] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c8165b-5a98-44d7-a006-5fc9d891c126 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.942685] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] 868cf942-f348-488d-b00a-af4c8b5efda5/868cf942-f348-488d-b00a-af4c8b5efda5.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1296.944725] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f511baa8-ef77-4fc5-9cab-ba14037186ee {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.966043] env[62508]: DEBUG oslo_concurrency.lockutils [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "de69dbf0-86f1-4b05-a9db-8b9afaabe49c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1296.966043] env[62508]: DEBUG oslo_concurrency.lockutils [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "de69dbf0-86f1-4b05-a9db-8b9afaabe49c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1296.976955] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': task-1775327, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.865149} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.979896] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] d32a3a5d-17d0-4a79-b76a-371cdd170ee0/d32a3a5d-17d0-4a79-b76a-371cdd170ee0.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1296.980266] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1296.981606] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1296.981606] env[62508]: value = "task-1775330" [ 1296.981606] env[62508]: _type = "Task" [ 1296.981606] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.981606] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-60dc13c6-7714-4136-b6b0-27339399ab35 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.995063] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775330, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.997047] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Waiting for the task: (returnval){ [ 1296.997047] env[62508]: value = "task-1775331" [ 1296.997047] env[62508]: _type = "Task" [ 1296.997047] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.008144] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': task-1775331, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.186572] env[62508]: DEBUG nova.compute.manager [req-593470de-e70b-4f71-aa11-864e7ea25a64 req-515a07d0-71a1-4a46-9e43-312306b60007 service nova] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Received event network-changed-69b55107-fb30-4e96-ae5d-062af1ec4850 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1297.186889] env[62508]: DEBUG nova.compute.manager [req-593470de-e70b-4f71-aa11-864e7ea25a64 req-515a07d0-71a1-4a46-9e43-312306b60007 service nova] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Refreshing instance network info cache due to event network-changed-69b55107-fb30-4e96-ae5d-062af1ec4850. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1297.187216] env[62508]: DEBUG oslo_concurrency.lockutils [req-593470de-e70b-4f71-aa11-864e7ea25a64 req-515a07d0-71a1-4a46-9e43-312306b60007 service nova] Acquiring lock "refresh_cache-d32a3a5d-17d0-4a79-b76a-371cdd170ee0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1297.187422] env[62508]: DEBUG oslo_concurrency.lockutils [req-593470de-e70b-4f71-aa11-864e7ea25a64 req-515a07d0-71a1-4a46-9e43-312306b60007 service nova] Acquired lock "refresh_cache-d32a3a5d-17d0-4a79-b76a-371cdd170ee0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.187641] env[62508]: DEBUG nova.network.neutron [req-593470de-e70b-4f71-aa11-864e7ea25a64 req-515a07d0-71a1-4a46-9e43-312306b60007 service nova] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Refreshing network info cache for port 69b55107-fb30-4e96-ae5d-062af1ec4850 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1297.261986] env[62508]: DEBUG oslo_vmware.api [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': task-1775322, 'name': PowerOnVM_Task, 'duration_secs': 1.483895} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.268483] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1297.268483] env[62508]: INFO nova.compute.manager [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Took 13.10 seconds to spawn the instance on the hypervisor. 
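The records above repeat one pattern over and over: a vSphere task handle is returned and waited on (wait_for_task at oslo_vmware/api.py:397), polled while it reports "progress is N%." (_poll_task at api.py:434), and finally logged as "completed successfully" with a duration (_poll_task at api.py:444). A minimal sketch of that polling loop, in plain Python with a hypothetical get_task_info accessor standing in for the PropertyCollector round-trips visible in the log (this is an illustrative sketch, not the actual oslo.vmware implementation):

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; an assumed value, the real interval is driven by a looping call

    def wait_for_task(session, task_ref):
        """Poll a vSphere task until it leaves the queued/running states.

        `session.get_task_info` is a hypothetical helper returning a dict such as
        {'state': 'running', 'progress': 10} or {'state': 'success', 'duration_secs': 0.1}.
        """
        while True:
            info = session.get_task_info(task_ref)
            if info['state'] == 'success':
                # corresponds to the "completed successfully" records at api.py:444
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            # corresponds to the "progress is N%." records at api.py:434
            time.sleep(POLL_INTERVAL)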
[ 1297.268684] env[62508]: DEBUG nova.compute.manager [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1297.270151] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0735676f-a55a-4d47-aeb1-b6c6e43c8717 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.281574] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5246fda5-03a3-c59c-158b-6c8e4f059da1, 'name': SearchDatastore_Task, 'duration_secs': 0.036763} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.284372] env[62508]: DEBUG oslo_concurrency.lockutils [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1297.284662] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1297.286371] env[62508]: DEBUG oslo_concurrency.lockutils [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1297.300902] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b602ea-d6d6-7f61-19a4-260f7f154eab, 'name': SearchDatastore_Task, 'duration_secs': 0.01248} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.301637] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aed74f6a-362b-4b6f-9c05-87daa0f3bbbb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.309138] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1297.309138] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524e4496-93b5-9beb-41f1-da785bf8a3b3" [ 1297.309138] env[62508]: _type = "Task" [ 1297.309138] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.320436] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524e4496-93b5-9beb-41f1-da785bf8a3b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.339730] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 03552483-a365-4d25-94bc-ea9b38ee6cd6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1297.372913] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': task-1775329, 'name': Rename_Task, 'duration_secs': 0.192969} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.372913] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1297.372913] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-40956d31-ef60-4017-948e-0789aa1b542c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.386741] env[62508]: DEBUG oslo_concurrency.lockutils [req-fdd23a9d-270d-410a-b364-3a8b1ffccacc req-6caae63d-be5f-4938-9649-9a823d0f7d8e service nova] Releasing lock "refresh_cache-868cf942-f348-488d-b00a-af4c8b5efda5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1297.387406] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Waiting for the task: (returnval){ [ 1297.387406] env[62508]: value = "task-1775332" [ 1297.387406] env[62508]: _type = "Task" [ 1297.387406] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.400078] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': task-1775332, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.450161] env[62508]: DEBUG nova.network.neutron [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1297.510414] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775330, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.522553] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': task-1775331, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098058} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.522908] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1297.527296] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06cfc454-904b-47d1-a265-c9bc9f4a6eb5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.550417] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] d32a3a5d-17d0-4a79-b76a-371cdd170ee0/d32a3a5d-17d0-4a79-b76a-371cdd170ee0.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1297.550575] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9bee2147-ef51-4404-9e1e-1343306dd88e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.576628] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Waiting for the task: (returnval){ [ 1297.576628] env[62508]: value = "task-1775333" [ 1297.576628] env[62508]: _type = "Task" [ 1297.576628] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.593556] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': task-1775333, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.619714] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Acquiring lock "42eb98a9-e341-4a17-9d76-2a2c37efc1a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.620051] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Lock "42eb98a9-e341-4a17-9d76-2a2c37efc1a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.727218] env[62508]: DEBUG nova.compute.manager [req-f53ce954-4a89-47d5-b08f-9e3d00782f6d req-eb34f7e5-90ee-4cf8-8347-70b7e2803468 service nova] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Received event network-vif-deleted-6f2165d0-7bca-4b3b-8351-aa81a2b3af95 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1297.801152] env[62508]: INFO nova.compute.manager [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Took 24.99 seconds to build instance. [ 1297.833933] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524e4496-93b5-9beb-41f1-da785bf8a3b3, 'name': SearchDatastore_Task, 'duration_secs': 0.055526} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.834851] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1297.834851] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b/96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1297.835078] env[62508]: DEBUG oslo_concurrency.lockutils [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.835413] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1297.836168] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-435019ec-92ff-4baa-b27c-69a66aabe4f0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.838778] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d984fa7e-2d2c-4d12-ab3e-0c7261cab6a9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.845313] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 7d23d8f0-d7a9-4236-ad28-208e77b72138 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1297.850822] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1297.850822] env[62508]: value = "task-1775334" [ 1297.850822] env[62508]: _type = "Task" [ 1297.850822] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.859440] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1297.859440] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1297.860341] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03fc9f64-b0ab-4670-a0d6-96b3b3f8b26f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.867235] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775334, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.871984] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Waiting for the task: (returnval){ [ 1297.871984] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f419fe-5632-4bb9-e2d9-db6b36dba335" [ 1297.871984] env[62508]: _type = "Task" [ 1297.871984] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.892240] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f419fe-5632-4bb9-e2d9-db6b36dba335, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.903176] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': task-1775332, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.908745] env[62508]: DEBUG nova.network.neutron [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Updating instance_info_cache with network_info: [{"id": "173aba5f-fabb-4f48-899e-9fc4716084fa", "address": "fa:16:3e:a8:2b:ab", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.37", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap173aba5f-fa", "ovs_interfaceid": "173aba5f-fabb-4f48-899e-9fc4716084fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.995390] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775330, 'name': ReconfigVM_Task, 'duration_secs': 0.869279} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.995994] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Reconfigured VM instance instance-00000006 to attach disk [datastore1] 868cf942-f348-488d-b00a-af4c8b5efda5/868cf942-f348-488d-b00a-af4c8b5efda5.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1297.996669] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9235677f-5a3c-4f75-9ba6-60ea05aeb081 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.005538] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1298.005538] env[62508]: value = "task-1775335" [ 1298.005538] env[62508]: _type = "Task" [ 1298.005538] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.018337] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775335, 'name': Rename_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.091821] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': task-1775333, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.154121] env[62508]: DEBUG nova.network.neutron [req-593470de-e70b-4f71-aa11-864e7ea25a64 req-515a07d0-71a1-4a46-9e43-312306b60007 service nova] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Updated VIF entry in instance network info cache for port 69b55107-fb30-4e96-ae5d-062af1ec4850. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1298.154393] env[62508]: DEBUG nova.network.neutron [req-593470de-e70b-4f71-aa11-864e7ea25a64 req-515a07d0-71a1-4a46-9e43-312306b60007 service nova] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Updating instance_info_cache with network_info: [{"id": "69b55107-fb30-4e96-ae5d-062af1ec4850", "address": "fa:16:3e:75:a6:d6", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.70", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69b55107-fb", "ovs_interfaceid": "69b55107-fb30-4e96-ae5d-062af1ec4850", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1298.165634] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquiring lock "e652e59f-9432-41cf-b4a5-0f5cf649b24e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.165884] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "e652e59f-9432-41cf-b4a5-0f5cf649b24e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.304188] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e53d6e3-b29a-443c-8895-ea68e10f9dbe tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Lock 
"827b0887-2132-49af-bcce-cedc7237245d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.504s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.348437] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance b911f25d-711b-411e-bb2d-2e59386ff2ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1298.365072] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775334, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.386753] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f419fe-5632-4bb9-e2d9-db6b36dba335, 'name': SearchDatastore_Task, 'duration_secs': 0.019637} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.388093] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b3e5794-dbc1-4b5e-84d2-cc55655010a2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.403787] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Waiting for the task: (returnval){ [ 1298.403787] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524a2e39-41a9-fb27-e291-1ea0de362470" [ 1298.403787] env[62508]: _type = "Task" [ 1298.403787] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.407649] env[62508]: DEBUG oslo_vmware.api [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': task-1775332, 'name': PowerOnVM_Task, 'duration_secs': 0.610712} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.411845] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1298.412125] env[62508]: INFO nova.compute.manager [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Took 7.06 seconds to spawn the instance on the hypervisor. 
[ 1298.412313] env[62508]: DEBUG nova.compute.manager [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1298.412905] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Releasing lock "refresh_cache-7339c22a-05c9-4ddd-93df-0326cbe96ca4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1298.413201] env[62508]: DEBUG nova.compute.manager [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Instance network_info: |[{"id": "173aba5f-fabb-4f48-899e-9fc4716084fa", "address": "fa:16:3e:a8:2b:ab", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.37", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap173aba5f-fa", "ovs_interfaceid": "173aba5f-fabb-4f48-899e-9fc4716084fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1298.414061] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea3f4562-0c8c-4415-949a-4172be3dfa00 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.417148] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:2b:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '173aba5f-fabb-4f48-899e-9fc4716084fa', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1298.427149] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Creating folder: Project (d4a984ff530a41058c97abb746a4efee). Parent ref: group-v368536. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1298.427149] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9f71c5e-66fe-4811-baac-3af375d6af5e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.433460] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524a2e39-41a9-fb27-e291-1ea0de362470, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.447738] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Created folder: Project (d4a984ff530a41058c97abb746a4efee) in parent group-v368536. [ 1298.447738] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Creating folder: Instances. Parent ref: group-v368565. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1298.448512] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b2230d4a-d8ad-4159-a05b-d1a01d70a506 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.461278] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Created folder: Instances in parent group-v368565. [ 1298.461752] env[62508]: DEBUG oslo.service.loopingcall [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1298.462604] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1298.462604] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9d60447-9800-4288-aeb2-77932bbf04ff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.491190] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1298.491190] env[62508]: value = "task-1775338" [ 1298.491190] env[62508]: _type = "Task" [ 1298.491190] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.500765] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775338, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.517368] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775335, 'name': Rename_Task, 'duration_secs': 0.17987} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.517368] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1298.517545] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f90cad1f-0432-4529-ac45-9c42ac865f46 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.526032] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1298.526032] env[62508]: value = "task-1775339" [ 1298.526032] env[62508]: _type = "Task" [ 1298.526032] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.535846] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775339, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.591875] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': task-1775333, 'name': ReconfigVM_Task, 'duration_secs': 0.585578} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.592054] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Reconfigured VM instance instance-00000007 to attach disk [datastore1] d32a3a5d-17d0-4a79-b76a-371cdd170ee0/d32a3a5d-17d0-4a79-b76a-371cdd170ee0.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1298.592741] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-295b25e4-bf02-4362-819a-2109d261e1ff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.600927] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Waiting for the task: (returnval){ [ 1298.600927] env[62508]: value = "task-1775340" [ 1298.600927] env[62508]: _type = "Task" [ 1298.600927] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.612017] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': task-1775340, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.657552] env[62508]: DEBUG oslo_concurrency.lockutils [req-593470de-e70b-4f71-aa11-864e7ea25a64 req-515a07d0-71a1-4a46-9e43-312306b60007 service nova] Releasing lock "refresh_cache-d32a3a5d-17d0-4a79-b76a-371cdd170ee0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1298.808046] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1298.855021] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1298.855021] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1298.855021] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1298.872312] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775334, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.700621} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.872582] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b/96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1298.872791] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1298.873058] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b7fc517e-3533-42de-bf57-2b1d011d7be8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.890149] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1298.890149] env[62508]: value = "task-1775341" [ 1298.890149] env[62508]: _type = "Task" [ 1298.890149] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.904743] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775341, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.923358] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524a2e39-41a9-fb27-e291-1ea0de362470, 'name': SearchDatastore_Task, 'duration_secs': 0.06373} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.923793] env[62508]: DEBUG oslo_concurrency.lockutils [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1298.924139] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] fa00f4fe-3bb2-4e17-be22-8a1fda502f65/fa00f4fe-3bb2-4e17-be22-8a1fda502f65.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1298.924438] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ba3086b-45d0-4a27-be0d-52ff0bbdbdfe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.934601] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Waiting for the task: (returnval){ [ 1298.934601] env[62508]: value = "task-1775342" [ 1298.934601] env[62508]: _type = "Task" [ 1298.934601] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.948720] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': task-1775342, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.956266] env[62508]: INFO nova.compute.manager [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Took 19.09 seconds to build instance. [ 1299.008387] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775338, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.040195] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775339, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.117790] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': task-1775340, 'name': Rename_Task, 'duration_secs': 0.33215} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.121368] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1299.122107] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cae6edfd-eeaa-4fa4-8a66-ac5fc88fe5c1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.130871] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Waiting for the task: (returnval){ [ 1299.130871] env[62508]: value = "task-1775343" [ 1299.130871] env[62508]: _type = "Task" [ 1299.130871] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.147075] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': task-1775343, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.318421] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0e9f09-1f0e-42f4-accb-74532365da1d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.331707] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74652d2d-d9cc-45aa-8ba6-6a61538377fd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.338019] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.373320] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f55f54d-b12f-4d4f-b34c-05287d75b456 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.392052] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89189c95-19fe-4c11-890b-736436071fd2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.416280] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775341, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114578} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.430826] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1299.432541] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1299.434899] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f201e8e-2281-4798-86ce-ee32dcf077fa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.448860] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': task-1775342, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.466139] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b/96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1299.467339] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a09f6508-1c29-47a9-9f5e-4c0919cd5ef4 tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Lock "891fac54-2ec4-4d47-8535-a33bd9dfb804" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.616s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.467888] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df788059-7c6d-4fae-866b-b7c1706b64e1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.504942] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1299.504942] env[62508]: value = "task-1775344" [ 1299.504942] env[62508]: _type = "Task" [ 1299.504942] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.511045] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775338, 'name': CreateVM_Task, 'duration_secs': 0.662122} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.511628] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1299.512356] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1299.512508] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1299.512862] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1299.513457] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-608a3c52-b448-44e5-b7cd-9d121254f7af {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.519526] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775344, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.524373] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Waiting for the task: (returnval){ [ 1299.524373] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ee0ef8-ee3a-ecb9-ddd5-46b7e3e15f9d" [ 1299.524373] env[62508]: _type = "Task" [ 1299.524373] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.538354] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ee0ef8-ee3a-ecb9-ddd5-46b7e3e15f9d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.541725] env[62508]: DEBUG oslo_vmware.api [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775339, 'name': PowerOnVM_Task, 'duration_secs': 0.912243} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.542018] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1299.542240] env[62508]: INFO nova.compute.manager [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Took 13.09 seconds to spawn the instance on the hypervisor. [ 1299.542421] env[62508]: DEBUG nova.compute.manager [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1299.543289] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6ff1d3-3c2f-471e-aaea-4f4d52b24c11 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.645406] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': task-1775343, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.883414] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Acquiring lock "b182d3aa-a4de-4879-ab36-2cb51472158a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.883788] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Lock "b182d3aa-a4de-4879-ab36-2cb51472158a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1299.883788] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Acquiring lock "b182d3aa-a4de-4879-ab36-2cb51472158a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.883945] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Lock "b182d3aa-a4de-4879-ab36-2cb51472158a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1299.884245] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Lock "b182d3aa-a4de-4879-ab36-2cb51472158a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.886531] env[62508]: INFO nova.compute.manager [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Terminating instance [ 1299.889626] env[62508]: DEBUG nova.compute.manager [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1299.889626] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1299.890588] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6c2d18-23f8-491c-9034-cedd293da143 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.902176] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1299.902486] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-417f30ee-360d-455a-b9f7-629a72c263c1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.913031] env[62508]: DEBUG oslo_vmware.api [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Waiting for the task: (returnval){ [ 1299.913031] env[62508]: value = "task-1775345" [ 1299.913031] env[62508]: _type = "Task" [ 1299.913031] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.922644] env[62508]: DEBUG oslo_vmware.api [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': task-1775345, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.961623] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': task-1775342, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.588213} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.962121] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] fa00f4fe-3bb2-4e17-be22-8a1fda502f65/fa00f4fe-3bb2-4e17-be22-8a1fda502f65.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1299.962516] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1299.964020] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-264a7f65-f149-4908-8b61-948211371753 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.967103] env[62508]: ERROR nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [req-bbbdbdc2-e524-4858-8845-6835ee4658bc] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bbbdbdc2-e524-4858-8845-6835ee4658bc"}]} [ 1299.975672] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Waiting for the task: (returnval){ [ 1299.975672] env[62508]: value = "task-1775346" [ 1299.975672] env[62508]: _type = "Task" [ 1299.975672] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.986065] env[62508]: DEBUG nova.compute.manager [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1299.989509] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': task-1775346, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.992333] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1300.015716] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775344, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.016967] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1300.017745] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1300.033545] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1300.044596] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ee0ef8-ee3a-ecb9-ddd5-46b7e3e15f9d, 'name': SearchDatastore_Task, 'duration_secs': 0.01377} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.044955] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1300.045348] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1300.045642] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1300.045864] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.046120] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1300.046417] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3bedc24-b44e-45a7-99f3-0384ebd79c8a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.060264] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1300.060633] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1300.064496] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1300.066760] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3e3c461-5c1d-4505-9a06-4c8cc9003a82 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.072920] env[62508]: INFO nova.compute.manager [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Took 26.46 seconds to build instance. [ 1300.076790] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Waiting for the task: (returnval){ [ 1300.076790] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e38ac4-79d7-d5d8-c6af-54f279c1aee3" [ 1300.076790] env[62508]: _type = "Task" [ 1300.076790] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.085849] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e38ac4-79d7-d5d8-c6af-54f279c1aee3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.146707] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': task-1775343, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.346223] env[62508]: DEBUG nova.compute.manager [req-31ae9a23-2995-4e9c-96b6-75127f960654 req-cd82ff30-aa23-43be-a64d-b78f8348fbe4 service nova] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Received event network-vif-plugged-173aba5f-fabb-4f48-899e-9fc4716084fa {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1300.346578] env[62508]: DEBUG oslo_concurrency.lockutils [req-31ae9a23-2995-4e9c-96b6-75127f960654 req-cd82ff30-aa23-43be-a64d-b78f8348fbe4 service nova] Acquiring lock "7339c22a-05c9-4ddd-93df-0326cbe96ca4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1300.346682] env[62508]: DEBUG oslo_concurrency.lockutils [req-31ae9a23-2995-4e9c-96b6-75127f960654 req-cd82ff30-aa23-43be-a64d-b78f8348fbe4 service nova] Lock "7339c22a-05c9-4ddd-93df-0326cbe96ca4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1300.346942] env[62508]: DEBUG oslo_concurrency.lockutils [req-31ae9a23-2995-4e9c-96b6-75127f960654 req-cd82ff30-aa23-43be-a64d-b78f8348fbe4 service nova] Lock "7339c22a-05c9-4ddd-93df-0326cbe96ca4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1300.347249] env[62508]: DEBUG nova.compute.manager [req-31ae9a23-2995-4e9c-96b6-75127f960654 req-cd82ff30-aa23-43be-a64d-b78f8348fbe4 service nova] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] No waiting events found dispatching network-vif-plugged-173aba5f-fabb-4f48-899e-9fc4716084fa {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1300.348079] env[62508]: WARNING nova.compute.manager [req-31ae9a23-2995-4e9c-96b6-75127f960654 req-cd82ff30-aa23-43be-a64d-b78f8348fbe4 service nova] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Received unexpected event network-vif-plugged-173aba5f-fabb-4f48-899e-9fc4716084fa for instance with vm_state building and task_state spawning. [ 1300.348137] env[62508]: DEBUG nova.compute.manager [req-31ae9a23-2995-4e9c-96b6-75127f960654 req-cd82ff30-aa23-43be-a64d-b78f8348fbe4 service nova] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Received event network-changed-173aba5f-fabb-4f48-899e-9fc4716084fa {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1300.348311] env[62508]: DEBUG nova.compute.manager [req-31ae9a23-2995-4e9c-96b6-75127f960654 req-cd82ff30-aa23-43be-a64d-b78f8348fbe4 service nova] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Refreshing instance network info cache due to event network-changed-173aba5f-fabb-4f48-899e-9fc4716084fa. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1300.348537] env[62508]: DEBUG oslo_concurrency.lockutils [req-31ae9a23-2995-4e9c-96b6-75127f960654 req-cd82ff30-aa23-43be-a64d-b78f8348fbe4 service nova] Acquiring lock "refresh_cache-7339c22a-05c9-4ddd-93df-0326cbe96ca4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1300.348690] env[62508]: DEBUG oslo_concurrency.lockutils [req-31ae9a23-2995-4e9c-96b6-75127f960654 req-cd82ff30-aa23-43be-a64d-b78f8348fbe4 service nova] Acquired lock "refresh_cache-7339c22a-05c9-4ddd-93df-0326cbe96ca4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.348855] env[62508]: DEBUG nova.network.neutron [req-31ae9a23-2995-4e9c-96b6-75127f960654 req-cd82ff30-aa23-43be-a64d-b78f8348fbe4 service nova] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Refreshing network info cache for port 173aba5f-fabb-4f48-899e-9fc4716084fa {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1300.428363] env[62508]: DEBUG oslo_vmware.api [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': task-1775345, 'name': PowerOffVM_Task, 'duration_secs': 0.365234} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.428762] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1300.429849] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1300.429849] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a2f73654-2a5f-4f00-a049-c26faf466212 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.458785] env[62508]: DEBUG nova.compute.manager [None req-6a4166f1-2236-41b1-822b-d4abf45a7bae tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1300.458785] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca98874d-d142-45a0-9c64-e82228ca81b3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.489904] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': task-1775346, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103647} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.490129] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1300.495125] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d75298df-493c-4f16-8607-7f21c1787b07 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.534243] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] fa00f4fe-3bb2-4e17-be22-8a1fda502f65/fa00f4fe-3bb2-4e17-be22-8a1fda502f65.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1300.538382] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1300.538382] env[62508]: DEBUG oslo_concurrency.lockutils [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Acquiring lock "827b0887-2132-49af-bcce-cedc7237245d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1300.538382] env[62508]: DEBUG oslo_concurrency.lockutils [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Lock "827b0887-2132-49af-bcce-cedc7237245d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1300.538382] env[62508]: DEBUG oslo_concurrency.lockutils [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Acquiring lock "827b0887-2132-49af-bcce-cedc7237245d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1300.538681] env[62508]: DEBUG oslo_concurrency.lockutils [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Lock "827b0887-2132-49af-bcce-cedc7237245d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1300.538681] env[62508]: DEBUG oslo_concurrency.lockutils [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Lock "827b0887-2132-49af-bcce-cedc7237245d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1300.540882] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55d27751-b8fa-48e4-8233-7d197c6b0aa9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.559354] env[62508]: INFO nova.compute.manager [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Terminating instance [ 1300.564973] env[62508]: DEBUG nova.compute.manager [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1300.566092] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1300.566595] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ca6cde-54ba-4c41-890e-ff1d6bb265d9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.575229] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775344, 'name': ReconfigVM_Task, 'duration_secs': 0.846589} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.575663] env[62508]: DEBUG oslo_concurrency.lockutils [None req-66d9e453-5871-4583-949f-cf6fdae11241 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "868cf942-f348-488d-b00a-af4c8b5efda5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.976s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1300.575981] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Waiting for the task: (returnval){ [ 1300.575981] env[62508]: value = "task-1775348" [ 1300.575981] env[62508]: _type = "Task" [ 1300.575981] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.577302] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b/96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1300.579134] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ef41a04-ff7f-4ce3-a605-8ab97e15819a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.593404] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b786ef2-1962-45f1-9d99-691fb9e4f43e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.616794] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': task-1775348, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.616794] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1300.616794] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-74ff9b0b-a6f5-47d5-95e0-6fdc15b06bb5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.618305] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1300.618305] env[62508]: value = "task-1775349" [ 1300.618305] env[62508]: _type = "Task" [ 1300.618305] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.623776] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e38ac4-79d7-d5d8-c6af-54f279c1aee3, 'name': SearchDatastore_Task, 'duration_secs': 0.017569} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.629122] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8ba70e-d721-4f5c-83da-4f4792eea630 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.633729] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d14fb805-d90e-47e5-b966-9ce1af83cbc4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.637471] env[62508]: DEBUG oslo_vmware.api [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Waiting for the task: (returnval){ [ 1300.637471] env[62508]: value = "task-1775350" [ 1300.637471] env[62508]: _type = "Task" [ 1300.637471] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.650748] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775349, 'name': Rename_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.691184] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Waiting for the task: (returnval){ [ 1300.691184] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522b5e1a-e769-eae6-ad81-f7122425ecb7" [ 1300.691184] env[62508]: _type = "Task" [ 1300.691184] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.691538] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1300.691804] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1300.692121] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Deleting the datastore file [datastore1] b182d3aa-a4de-4879-ab36-2cb51472158a {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1300.698617] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ee0d7d9-7a3c-4ed7-8d75-7c9ebacb4fb6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.702312] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bfa037c-fa46-423e-8679-52c327dd1ddf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.706748] env[62508]: DEBUG oslo_vmware.api [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': task-1775343, 'name': PowerOnVM_Task, 'duration_secs': 1.109956} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.716754] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1300.717109] env[62508]: INFO nova.compute.manager [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Took 11.88 seconds to spawn the instance on the hypervisor. [ 1300.717385] env[62508]: DEBUG nova.compute.manager [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1300.718068] env[62508]: DEBUG oslo_vmware.api [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': task-1775350, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.720056] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e3f412-afee-4a50-b570-b34a7874a912 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.730356] env[62508]: DEBUG oslo_vmware.api [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Waiting for the task: (returnval){ [ 1300.730356] env[62508]: value = "task-1775351" [ 1300.730356] env[62508]: _type = "Task" [ 1300.730356] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.732845] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de121b87-c321-406d-b090-de2f7b23f216 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.756759] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522b5e1a-e769-eae6-ad81-f7122425ecb7, 'name': SearchDatastore_Task, 'duration_secs': 0.018106} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.758524] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1300.758828] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 7339c22a-05c9-4ddd-93df-0326cbe96ca4/7339c22a-05c9-4ddd-93df-0326cbe96ca4.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1300.759648] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-80305f7b-b07a-43c6-819a-c30cb423265b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.776020] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1300.777550] env[62508]: DEBUG 
oslo_vmware.api [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': task-1775351, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.783418] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Waiting for the task: (returnval){ [ 1300.783418] env[62508]: value = "task-1775352" [ 1300.783418] env[62508]: _type = "Task" [ 1300.783418] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.791865] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': task-1775352, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.979834] env[62508]: INFO nova.compute.manager [None req-6a4166f1-2236-41b1-822b-d4abf45a7bae tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] instance snapshotting [ 1300.979834] env[62508]: DEBUG nova.objects.instance [None req-6a4166f1-2236-41b1-822b-d4abf45a7bae tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Lazy-loading 'flavor' on Instance uuid 891fac54-2ec4-4d47-8535-a33bd9dfb804 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1301.093575] env[62508]: DEBUG nova.compute.manager [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1301.099415] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': task-1775348, 'name': ReconfigVM_Task, 'duration_secs': 0.405943} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.102128] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Reconfigured VM instance instance-0000000a to attach disk [datastore1] fa00f4fe-3bb2-4e17-be22-8a1fda502f65/fa00f4fe-3bb2-4e17-be22-8a1fda502f65.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1301.102972] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1bcffb0c-654e-4a3e-bc2d-308770bce201 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.112330] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Waiting for the task: (returnval){ [ 1301.112330] env[62508]: value = "task-1775353" [ 1301.112330] env[62508]: _type = "Task" [ 1301.112330] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.126499] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': task-1775353, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.142558] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775349, 'name': Rename_Task, 'duration_secs': 0.208574} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.146378] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1301.146609] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3df9f52d-0c41-43b8-94a3-26b0ec324348 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.153985] env[62508]: DEBUG oslo_vmware.api [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': task-1775350, 'name': PowerOffVM_Task, 'duration_secs': 0.232656} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.154104] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1301.156022] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1301.156022] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac3f16e3-8e4e-4f29-9ba0-4f1e1f386b33 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.158767] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1301.158767] env[62508]: value = "task-1775354" [ 1301.158767] env[62508]: _type = "Task" [ 1301.158767] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.174522] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775354, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.252658] env[62508]: DEBUG oslo_vmware.api [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Task: {'id': task-1775351, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.262943} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.252988] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1301.253353] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1301.253534] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1301.253793] env[62508]: INFO nova.compute.manager [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Took 1.36 seconds to destroy the instance on the hypervisor. [ 1301.254083] env[62508]: DEBUG oslo.service.loopingcall [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1301.254294] env[62508]: DEBUG nova.compute.manager [-] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1301.254401] env[62508]: DEBUG nova.network.neutron [-] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1301.266167] env[62508]: INFO nova.compute.manager [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Took 27.48 seconds to build instance. 
[ 1301.272572] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1301.272658] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1301.273302] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Deleting the datastore file [datastore1] 827b0887-2132-49af-bcce-cedc7237245d {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1301.273302] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0bf8770e-ce40-4405-acd2-11bef297108b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.283649] env[62508]: DEBUG oslo_vmware.api [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Waiting for the task: (returnval){ [ 1301.283649] env[62508]: value = "task-1775356" [ 1301.283649] env[62508]: _type = "Task" [ 1301.283649] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.297401] env[62508]: DEBUG oslo_vmware.api [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': task-1775356, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.304612] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': task-1775352, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.329041] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 25 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1301.329041] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 25 to 26 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1301.329041] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1301.427481] env[62508]: DEBUG nova.network.neutron [req-31ae9a23-2995-4e9c-96b6-75127f960654 req-cd82ff30-aa23-43be-a64d-b78f8348fbe4 service nova] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Updated VIF entry in instance network info cache for port 173aba5f-fabb-4f48-899e-9fc4716084fa. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1301.427481] env[62508]: DEBUG nova.network.neutron [req-31ae9a23-2995-4e9c-96b6-75127f960654 req-cd82ff30-aa23-43be-a64d-b78f8348fbe4 service nova] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Updating instance_info_cache with network_info: [{"id": "173aba5f-fabb-4f48-899e-9fc4716084fa", "address": "fa:16:3e:a8:2b:ab", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.37", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap173aba5f-fa", "ovs_interfaceid": "173aba5f-fabb-4f48-899e-9fc4716084fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1301.434478] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Acquiring lock "891fac54-2ec4-4d47-8535-a33bd9dfb804" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1301.434891] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Lock "891fac54-2ec4-4d47-8535-a33bd9dfb804" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1301.434951] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Acquiring lock "891fac54-2ec4-4d47-8535-a33bd9dfb804-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1301.437180] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Lock "891fac54-2ec4-4d47-8535-a33bd9dfb804-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1301.437180] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 
tempest-ServersAaction247Test-1583780510-project-member] Lock "891fac54-2ec4-4d47-8535-a33bd9dfb804-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1301.437731] env[62508]: INFO nova.compute.manager [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Terminating instance [ 1301.443235] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Acquiring lock "refresh_cache-891fac54-2ec4-4d47-8535-a33bd9dfb804" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1301.443235] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Acquired lock "refresh_cache-891fac54-2ec4-4d47-8535-a33bd9dfb804" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.443235] env[62508]: DEBUG nova.network.neutron [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1301.487905] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ad1db0-c1b2-46e1-baa9-970dbec741b7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.506916] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b769a030-c7db-451b-988f-d1e2cd07977b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.622486] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': task-1775353, 'name': Rename_Task, 'duration_secs': 0.203488} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.622486] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1301.624506] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a621c25f-b487-41ce-9996-a94b00753c02 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.625608] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1301.630157] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Waiting for the task: (returnval){ [ 1301.630157] env[62508]: value = "task-1775357" [ 1301.630157] env[62508]: _type = "Task" [ 1301.630157] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.643322] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': task-1775357, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.670117] env[62508]: DEBUG oslo_vmware.api [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775354, 'name': PowerOnVM_Task, 'duration_secs': 0.508403} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.670390] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1301.670809] env[62508]: DEBUG nova.compute.manager [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1301.671802] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91563bec-6bfc-485c-ad21-a320e8cf9803 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.768381] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c8425a10-096d-41c1-affe-21a05f6c562d tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Lock "d32a3a5d-17d0-4a79-b76a-371cdd170ee0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.003s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1301.801153] env[62508]: DEBUG oslo_vmware.api [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Task: {'id': task-1775356, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.414938} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.802412] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': task-1775352, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.603368} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.802892] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1301.803216] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1301.803502] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1301.803789] env[62508]: INFO nova.compute.manager [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1301.804149] env[62508]: DEBUG oslo.service.loopingcall [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1301.804479] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 7339c22a-05c9-4ddd-93df-0326cbe96ca4/7339c22a-05c9-4ddd-93df-0326cbe96ca4.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1301.804866] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1301.805474] env[62508]: DEBUG nova.compute.manager [-] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1301.805680] env[62508]: DEBUG nova.network.neutron [-] [instance: 827b0887-2132-49af-bcce-cedc7237245d] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1301.807375] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5d2eb7f5-42ce-4288-8134-c9889d76cace {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.815379] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Waiting for the task: (returnval){ [ 1301.815379] env[62508]: value = "task-1775358" [ 1301.815379] env[62508]: _type = "Task" [ 1301.815379] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.827134] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': task-1775358, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.835223] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1301.835496] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.104s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1301.835657] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.535s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1301.837803] env[62508]: INFO nova.compute.claims [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1301.929491] env[62508]: DEBUG oslo_concurrency.lockutils [req-31ae9a23-2995-4e9c-96b6-75127f960654 req-cd82ff30-aa23-43be-a64d-b78f8348fbe4 service nova] Releasing lock "refresh_cache-7339c22a-05c9-4ddd-93df-0326cbe96ca4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1301.979938] env[62508]: DEBUG nova.network.neutron [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1302.017060] env[62508]: DEBUG nova.compute.manager [None req-6a4166f1-2236-41b1-822b-d4abf45a7bae tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Instance disappeared during snapshot {{(pid=62508) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4495}} [ 1302.102066] env[62508]: DEBUG nova.network.neutron [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1302.143311] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': task-1775357, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.194616] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.221293] env[62508]: DEBUG nova.compute.manager [None req-6a4166f1-2236-41b1-822b-d4abf45a7bae tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Found 0 images (rotation: 2) {{(pid=62508) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1302.270692] env[62508]: DEBUG nova.compute.manager [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1302.284883] env[62508]: DEBUG nova.network.neutron [-] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1302.334891] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': task-1775358, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.144638} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.334891] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1302.335469] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6756a87-09e9-476c-9e00-cb20c599c7cb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.368280] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 7339c22a-05c9-4ddd-93df-0326cbe96ca4/7339c22a-05c9-4ddd-93df-0326cbe96ca4.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1302.370132] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-afdfe6ab-bfae-4747-8ea4-de8ff22cc872 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.397190] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Waiting for the task: (returnval){ [ 1302.397190] env[62508]: value = "task-1775359" [ 1302.397190] env[62508]: _type = "Task" [ 1302.397190] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.410953] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': task-1775359, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.475465] env[62508]: DEBUG oslo_concurrency.lockutils [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Acquiring lock "73452964-d690-451d-98c3-fba3c3301c6d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.476818] env[62508]: DEBUG oslo_concurrency.lockutils [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Lock "73452964-d690-451d-98c3-fba3c3301c6d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.607031] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Releasing lock "refresh_cache-891fac54-2ec4-4d47-8535-a33bd9dfb804" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1302.607031] env[62508]: DEBUG nova.compute.manager [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1302.607031] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1302.607363] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa235201-bccb-405d-8a1a-e2c2b169d640 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.615356] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1302.615716] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87218f9c-5638-4bcc-b5fc-77ff42a9d949 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.621190] env[62508]: DEBUG oslo_vmware.api [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Waiting for the task: (returnval){ [ 1302.621190] env[62508]: value = "task-1775360" [ 1302.621190] env[62508]: _type = "Task" [ 1302.621190] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.629735] env[62508]: DEBUG oslo_vmware.api [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': task-1775360, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.643575] env[62508]: DEBUG oslo_vmware.api [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': task-1775357, 'name': PowerOnVM_Task, 'duration_secs': 0.873158} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.643575] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1302.643575] env[62508]: INFO nova.compute.manager [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Took 6.87 seconds to spawn the instance on the hypervisor. [ 1302.643575] env[62508]: DEBUG nova.compute.manager [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1302.643575] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a37343d-df21-4064-97a1-b08e5d6dd5ed {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.756649] env[62508]: DEBUG nova.compute.manager [req-53ef30af-7b9c-4da1-b8cb-882539f6a683 req-bad61f8c-4588-4e8f-ba7a-3fca9c3accf3 service nova] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Received event network-vif-deleted-6e5092d1-840e-43a2-9714-b0ee31c608f4 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1302.788527] env[62508]: INFO nova.compute.manager [-] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Took 1.53 seconds to deallocate network for instance. 
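The oslo_concurrency.lockutils lines that bracket nearly every operation here record two durations per named lock: how long the caller waited to acquire it and how long it was held (for example "waited 0.000s" and "held 29.003s" for the build lock above). A simplified analogue of that accounting using a plain threading.Lock, not the real oslo.concurrency implementation:

import threading
import time
from contextlib import contextmanager

_locks = {}                        # lock name -> threading.Lock
_registry_guard = threading.Lock()

@contextmanager
def timed_lock(name, owner):
    # Look up (or create) the named lock, then time the wait and hold phases.
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    wait_start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - wait_start
    print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, waited))
    held_start = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_start
        print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, owner, held))

# Example: serialize a resource claim the way the resource tracker does.
with timed_lock('compute_resources', 'ResourceTracker.instance_claim'):
    pass  # claim resources here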
[ 1302.798860] env[62508]: DEBUG nova.network.neutron [-] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1302.802566] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.838941] env[62508]: DEBUG oslo_concurrency.lockutils [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "a226327d-11df-45e0-bef8-2337a0317c9e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.839403] env[62508]: DEBUG oslo_concurrency.lockutils [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lock "a226327d-11df-45e0-bef8-2337a0317c9e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.839823] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1302.840227] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1302.840601] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1302.841692] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1302.842062] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1302.842387] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1302.843017] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running 
periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1302.843338] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1302.908654] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': task-1775359, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.133903] env[62508]: DEBUG oslo_vmware.api [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': task-1775360, 'name': PowerOffVM_Task, 'duration_secs': 0.129213} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.134215] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1303.134368] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1303.134618] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cab05948-02b7-4191-8fef-ed517dcfa1fb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.162174] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1303.162507] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1303.163198] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Deleting the datastore file [datastore1] 891fac54-2ec4-4d47-8535-a33bd9dfb804 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1303.166160] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca7d0125-1ab5-4334-8f6f-dfa08fa8b1f0 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.167922] env[62508]: INFO nova.compute.manager [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Took 20.83 seconds to build instance. [ 1303.174329] env[62508]: DEBUG oslo_vmware.api [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Waiting for the task: (returnval){ [ 1303.174329] env[62508]: value = "task-1775362" [ 1303.174329] env[62508]: _type = "Task" [ 1303.174329] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.189286] env[62508]: DEBUG oslo_vmware.api [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': task-1775362, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.250612] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b17ec413-377a-43fb-81e2-5964735a4f37 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.262443] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bdfdc71-ba66-4785-8701-fc5eeb8834c3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.301426] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce846f5-7208-4e98-b369-667c992d8375 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.305480] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1303.305776] env[62508]: INFO nova.compute.manager [-] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Took 1.50 seconds to deallocate network for instance. 
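The periodic-task run above ends with ComputeManager._reclaim_queued_deletes logging "CONF.reclaim_instance_interval <= 0, skipping..." and returning immediately: deferred reclaim of soft-deleted instances is disabled by configuration on this node. A small sketch of that guard, with a stand-in config object instead of Nova's real CONF:

from types import SimpleNamespace

# Stand-in for Nova's CONF; 0 (the default) disables deferred reclaim.
CONF = SimpleNamespace(reclaim_instance_interval=0)

def reclaim_queued_deletes():
    """Periodic task: purge instances soft-deleted longer ago than the interval."""
    if CONF.reclaim_instance_interval <= 0:
        print('CONF.reclaim_instance_interval <= 0, skipping...')
        return
    # Otherwise, look up instances soft-deleted more than
    # reclaim_instance_interval seconds ago and destroy them (omitted here).

reclaim_queued_deletes()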
[ 1303.316940] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ecf922c-132e-4cec-b431-849aa4275499 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.336455] env[62508]: DEBUG nova.compute.provider_tree [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1303.408700] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': task-1775359, 'name': ReconfigVM_Task, 'duration_secs': 0.963889} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.409153] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 7339c22a-05c9-4ddd-93df-0326cbe96ca4/7339c22a-05c9-4ddd-93df-0326cbe96ca4.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1303.409862] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-63e4cc43-24e4-4867-9e2d-bee47791922d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.416187] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Waiting for the task: (returnval){ [ 1303.416187] env[62508]: value = "task-1775363" [ 1303.416187] env[62508]: _type = "Task" [ 1303.416187] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.424728] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': task-1775363, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.470021] env[62508]: DEBUG nova.compute.manager [None req-dfc4a000-f99d-4e13-a5b2-a11cef265e50 tempest-ServerExternalEventsTest-1656906466 tempest-ServerExternalEventsTest-1656906466-project] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Received event network-changed {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1303.470021] env[62508]: DEBUG nova.compute.manager [None req-dfc4a000-f99d-4e13-a5b2-a11cef265e50 tempest-ServerExternalEventsTest-1656906466 tempest-ServerExternalEventsTest-1656906466-project] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Refreshing instance network info cache due to event network-changed. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1303.470021] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dfc4a000-f99d-4e13-a5b2-a11cef265e50 tempest-ServerExternalEventsTest-1656906466 tempest-ServerExternalEventsTest-1656906466-project] Acquiring lock "refresh_cache-d32a3a5d-17d0-4a79-b76a-371cdd170ee0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1303.470021] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dfc4a000-f99d-4e13-a5b2-a11cef265e50 tempest-ServerExternalEventsTest-1656906466 tempest-ServerExternalEventsTest-1656906466-project] Acquired lock "refresh_cache-d32a3a5d-17d0-4a79-b76a-371cdd170ee0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.470021] env[62508]: DEBUG nova.network.neutron [None req-dfc4a000-f99d-4e13-a5b2-a11cef265e50 tempest-ServerExternalEventsTest-1656906466 tempest-ServerExternalEventsTest-1656906466-project] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1303.671871] env[62508]: DEBUG oslo_concurrency.lockutils [None req-29dc6d83-6ad0-4ea3-9b2d-82c7dce52c44 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Lock "fa00f4fe-3bb2-4e17-be22-8a1fda502f65" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.347s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1303.685240] env[62508]: DEBUG oslo_vmware.api [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Task: {'id': task-1775362, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138897} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.685509] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1303.685689] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1303.685857] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1303.687368] env[62508]: INFO nova.compute.manager [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Took 1.08 seconds to destroy the instance on the hypervisor. 
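The inventory payload logged earlier for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 (and reported unchanged again below) encodes schedulable capacity per resource class as (total - reserved) * allocation_ratio, which for the logged numbers works out to 192 VCPU, 196078 MB of RAM and 400 GB of disk. A short worked example of that arithmetic:

# Inventory exactly as logged for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def capacity(inv):
    # Schedulable capacity per resource class: (total - reserved) * allocation_ratio.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(capacity(inventory))
# -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}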
[ 1303.687368] env[62508]: DEBUG oslo.service.loopingcall [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1303.687368] env[62508]: DEBUG nova.compute.manager [-] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1303.687827] env[62508]: DEBUG nova.network.neutron [-] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1303.705765] env[62508]: DEBUG nova.network.neutron [-] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1303.817839] env[62508]: DEBUG oslo_concurrency.lockutils [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1303.842172] env[62508]: DEBUG nova.scheduler.client.report [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1303.927106] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': task-1775363, 'name': Rename_Task, 'duration_secs': 0.352926} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.927455] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1303.927705] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4476917d-66da-483f-bd2a-d4ce2cdce467 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.938091] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Waiting for the task: (returnval){ [ 1303.938091] env[62508]: value = "task-1775364" [ 1303.938091] env[62508]: _type = "Task" [ 1303.938091] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.951232] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': task-1775364, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.175025] env[62508]: DEBUG nova.compute.manager [None req-029874cc-999a-4711-9d09-757695c73209 tempest-ServerDiagnosticsV248Test-821006467 tempest-ServerDiagnosticsV248Test-821006467-project-admin] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1304.175337] env[62508]: DEBUG nova.compute.manager [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1304.181494] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45142e5c-1ec4-4738-873e-8e0d0c25d88d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.191351] env[62508]: INFO nova.compute.manager [None req-029874cc-999a-4711-9d09-757695c73209 tempest-ServerDiagnosticsV248Test-821006467 tempest-ServerDiagnosticsV248Test-821006467-project-admin] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Retrieving diagnostics [ 1304.192475] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b630e4d-2ab4-4cbf-ab3a-a608555c76ed {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.227037] env[62508]: DEBUG nova.network.neutron [-] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.315680] env[62508]: DEBUG nova.network.neutron [None req-dfc4a000-f99d-4e13-a5b2-a11cef265e50 tempest-ServerExternalEventsTest-1656906466 tempest-ServerExternalEventsTest-1656906466-project] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Updating instance_info_cache with network_info: [{"id": "69b55107-fb30-4e96-ae5d-062af1ec4850", "address": "fa:16:3e:75:a6:d6", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.70", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69b55107-fb", "ovs_interfaceid": "69b55107-fb30-4e96-ae5d-062af1ec4850", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.347963] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.512s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.350018] env[62508]: DEBUG nova.compute.manager [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1304.351128] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.922s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.352603] env[62508]: INFO nova.compute.claims [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1304.371392] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Acquiring lock "b297d642-88a7-4acc-a94d-e1cb7df81982" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1304.371530] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Lock "b297d642-88a7-4acc-a94d-e1cb7df81982" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.392604] env[62508]: INFO nova.compute.manager [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Rebuilding instance [ 1304.455592] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': task-1775364, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.456446] env[62508]: DEBUG nova.compute.manager [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1304.457520] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd24ea31-d329-4533-950b-884b31c940b2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.707468] env[62508]: DEBUG oslo_concurrency.lockutils [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1304.733351] env[62508]: INFO nova.compute.manager [-] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Took 1.05 seconds to deallocate network for instance. [ 1304.817984] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dfc4a000-f99d-4e13-a5b2-a11cef265e50 tempest-ServerExternalEventsTest-1656906466 tempest-ServerExternalEventsTest-1656906466-project] Releasing lock "refresh_cache-d32a3a5d-17d0-4a79-b76a-371cdd170ee0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1304.861120] env[62508]: DEBUG nova.compute.utils [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1304.864309] env[62508]: DEBUG nova.compute.manager [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1304.864483] env[62508]: DEBUG nova.network.neutron [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1304.920716] env[62508]: DEBUG nova.policy [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '328d50e2315746e28cc1d5fe53eeecf6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f69e4f878f3d4174aaf6f125682087fb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1304.950950] env[62508]: DEBUG oslo_vmware.api [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': task-1775364, 'name': PowerOnVM_Task, 'duration_secs': 0.558071} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.951274] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1304.951498] env[62508]: INFO nova.compute.manager [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Took 11.68 seconds to spawn the instance on the hypervisor. 
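[editor's note] The power-on trace above (nova.virt.vmwareapi.vm_util invoking VirtualMachine.PowerOnVM_Task, then oslo_vmware.api polling task-1775364 from 33% to 100%) is the generic oslo.vmware invoke-and-wait pattern. A minimal sketch of that pattern follows, assuming oslo.vmware is installed; the vCenter endpoint, credentials and vm_ref are placeholders, not values from this log.

from oslo_vmware import api as vmware_api

def power_on_vm(session, vm_ref):
    # PowerOnVM_Task returns a Task managed-object reference; wait_for_task()
    # polls it (the "progress is 33% ... 100%" lines above) and raises if the
    # task finishes in an error state.
    task_ref = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    return session.wait_for_task(task_ref)

# Placeholder endpoint/credentials; a real run needs a reachable vCenter and a
# VM managed-object reference obtained via a PropertyCollector query.
session = vmware_api.VMwareAPISession(
    "vcenter.example.test", "user", "secret",
    api_retry_count=10, task_poll_interval=0.5)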
[ 1304.951679] env[62508]: DEBUG nova.compute.manager [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1304.952581] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222cd5f6-5b1f-4b33-b493-cbc9108a683a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.970577] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1304.970928] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-28f5a7cd-afbb-4d39-822b-0ed4fca499e6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.980200] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Waiting for the task: (returnval){ [ 1304.980200] env[62508]: value = "task-1775365" [ 1304.980200] env[62508]: _type = "Task" [ 1304.980200] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.990884] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': task-1775365, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.072615] env[62508]: DEBUG nova.compute.manager [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Stashing vm_state: active {{(pid=62508) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1305.242019] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1305.299817] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Acquiring lock "d32a3a5d-17d0-4a79-b76a-371cdd170ee0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1305.299817] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Lock "d32a3a5d-17d0-4a79-b76a-371cdd170ee0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1305.299817] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Acquiring lock "d32a3a5d-17d0-4a79-b76a-371cdd170ee0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1305.299817] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Lock "d32a3a5d-17d0-4a79-b76a-371cdd170ee0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1305.299977] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Lock "d32a3a5d-17d0-4a79-b76a-371cdd170ee0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1305.301938] env[62508]: INFO nova.compute.manager [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Terminating instance [ 1305.305446] env[62508]: DEBUG nova.compute.manager [None 
req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1305.305643] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1305.306548] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cace32ff-538c-413e-9b10-b586e6249d14 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.314474] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1305.314733] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b224ce5-9763-437a-aaef-46d8f0c08854 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.323297] env[62508]: DEBUG oslo_vmware.api [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Waiting for the task: (returnval){ [ 1305.323297] env[62508]: value = "task-1775366" [ 1305.323297] env[62508]: _type = "Task" [ 1305.323297] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.331756] env[62508]: DEBUG oslo_vmware.api [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': task-1775366, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.336947] env[62508]: DEBUG nova.network.neutron [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Successfully created port: a5f76bdc-0abb-4e34-b1bc-7711f90a4c29 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1305.373704] env[62508]: DEBUG nova.compute.manager [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1305.478179] env[62508]: INFO nova.compute.manager [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Took 24.39 seconds to build instance. 
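[editor's note] The 'Acquiring lock ... by ...' / 'acquired ... waited N s' / '"released" ... held N s' lines around the shared "compute_resources" lock and the per-instance "<uuid>" and "<uuid>-events" locks come from oslo.concurrency's lockutils wrappers. A minimal sketch of the two usual forms, with hypothetical function names, assuming oslo.concurrency is installed:

from oslo_concurrency import lockutils

# Decorator form: every claim against this host's inventory is serialized,
# comparable to the "compute_resources" lock held by instance_claim above.
@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid, vcpus, memory_mb):
    # Update in-memory usage totals; no other claim can interleave here.
    return {"instance": instance_uuid, "vcpus": vcpus, "memory_mb": memory_mb}

# Context-manager form: a per-instance lock such as "<uuid>-events",
# released automatically when the block exits.
def clear_events(instance_uuid, pending_events):
    with lockutils.lock("%s-events" % instance_uuid):
        return pending_events.pop(instance_uuid, None)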
[ 1305.500122] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': task-1775365, 'name': PowerOffVM_Task, 'duration_secs': 0.193539} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.500357] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1305.500592] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1305.502070] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb406405-684e-4806-bba2-1a273d8c0c9d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.510009] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1305.512593] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20c53874-4d40-4321-ab33-b31280ee839f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.538987] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1305.539803] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1305.539803] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Deleting the datastore file [datastore1] 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1305.539803] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-442e149c-ebf8-43a8-ac0b-5d36143649f6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.550092] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Waiting for the task: (returnval){ [ 
1305.550092] env[62508]: value = "task-1775368" [ 1305.550092] env[62508]: _type = "Task" [ 1305.550092] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.557110] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': task-1775368, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.583654] env[62508]: DEBUG nova.compute.manager [req-7e8939c2-0da1-44f0-9fd6-5bd361436dcc req-8c067837-2424-4e4d-92ca-65f5e38b2965 service nova] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Received event network-vif-deleted-7f451ea1-30cb-4db4-a325-8e1ef277ec13 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1305.596754] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1305.817732] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9888cfc6-5bf7-4ca0-87fa-092bb7489741 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.828399] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3349cabe-73a2-4361-b530-f81d1fe00fca {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.836031] env[62508]: DEBUG oslo_vmware.api [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': task-1775366, 'name': PowerOffVM_Task, 'duration_secs': 0.4233} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.861569] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1305.861787] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1305.862627] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4db6689e-d165-4590-addb-ad3ef03cdd34 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.864762] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc96db7-0581-4a8e-bb68-1d5a1faf0032 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.873211] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7eb329c-7616-48f2-b090-7aef9dc4820f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.893024] env[62508]: DEBUG nova.compute.provider_tree [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1305.949706] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1305.949706] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1305.949706] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Deleting the datastore file [datastore1] d32a3a5d-17d0-4a79-b76a-371cdd170ee0 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1305.949948] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fd571478-d6b0-42ac-837e-c00a8dfecd7e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.958628] env[62508]: DEBUG oslo_vmware.api [None req-25f4c355-4386-4917-a247-f18d48b96a8a 
tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Waiting for the task: (returnval){ [ 1305.958628] env[62508]: value = "task-1775370" [ 1305.958628] env[62508]: _type = "Task" [ 1305.958628] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.969151] env[62508]: DEBUG oslo_vmware.api [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': task-1775370, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.983343] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba20e56e-1b19-4bcf-9f82-ad23dae988f3 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Lock "7339c22a-05c9-4ddd-93df-0326cbe96ca4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.904s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1306.061991] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': task-1775368, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.209045} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.061991] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1306.061991] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1306.061991] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1306.395700] env[62508]: DEBUG nova.compute.manager [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1306.398917] env[62508]: DEBUG nova.scheduler.client.report [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1306.470378] env[62508]: DEBUG oslo_vmware.api [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Task: {'id': task-1775370, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.465077} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.470580] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1306.470775] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1306.470998] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1306.471206] env[62508]: INFO nova.compute.manager [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1306.471442] env[62508]: DEBUG oslo.service.loopingcall [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1306.471950] env[62508]: DEBUG nova.compute.manager [-] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1306.471950] env[62508]: DEBUG nova.network.neutron [-] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1306.487106] env[62508]: DEBUG nova.compute.manager [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1306.529066] env[62508]: DEBUG nova.virt.hardware [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1306.529066] env[62508]: DEBUG nova.virt.hardware [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1306.529066] env[62508]: DEBUG nova.virt.hardware [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1306.529289] env[62508]: DEBUG nova.virt.hardware [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1306.530586] env[62508]: DEBUG nova.virt.hardware [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1306.530586] env[62508]: DEBUG nova.virt.hardware [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1306.530586] env[62508]: DEBUG nova.virt.hardware [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1306.530586] env[62508]: DEBUG nova.virt.hardware [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1306.530586] env[62508]: DEBUG nova.virt.hardware [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1306.530847] env[62508]: DEBUG nova.virt.hardware [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1306.530847] env[62508]: DEBUG nova.virt.hardware [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1306.532439] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41e7e86-6c40-4598-ba04-d06d20d2c1d9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.545721] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d081beb-2711-4609-8578-1c9a6fcde8d0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.704071] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Acquiring lock "7339c22a-05c9-4ddd-93df-0326cbe96ca4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1306.704366] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Lock "7339c22a-05c9-4ddd-93df-0326cbe96ca4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1306.704583] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Acquiring lock 
"7339c22a-05c9-4ddd-93df-0326cbe96ca4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1306.704773] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Lock "7339c22a-05c9-4ddd-93df-0326cbe96ca4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1306.704945] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Lock "7339c22a-05c9-4ddd-93df-0326cbe96ca4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1306.707563] env[62508]: INFO nova.compute.manager [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Terminating instance [ 1306.713102] env[62508]: DEBUG nova.compute.manager [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1306.713102] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1306.713102] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08225dc5-ae5a-4ddd-b840-4ade2db232aa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.720157] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1306.720157] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f8bdebe-5e80-4ba3-a4d3-8cb7bb523cea {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.726593] env[62508]: DEBUG oslo_vmware.api [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Waiting for the task: (returnval){ [ 1306.726593] env[62508]: value = "task-1775371" [ 1306.726593] env[62508]: _type = "Task" [ 1306.726593] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.737223] env[62508]: DEBUG oslo_vmware.api [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': task-1775371, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.907099] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.554s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1306.907099] env[62508]: DEBUG nova.compute.manager [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1306.908891] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.021s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1306.911274] env[62508]: INFO nova.compute.claims [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1307.017023] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1307.106061] env[62508]: DEBUG nova.virt.hardware [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:563}} [ 1307.106470] env[62508]: DEBUG nova.virt.hardware [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1307.106519] env[62508]: DEBUG nova.virt.hardware [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1307.106673] env[62508]: DEBUG nova.virt.hardware [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1307.106896] env[62508]: DEBUG nova.virt.hardware [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1307.107020] env[62508]: DEBUG nova.virt.hardware [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1307.107236] env[62508]: DEBUG nova.virt.hardware [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1307.107502] env[62508]: DEBUG nova.virt.hardware [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1307.107877] env[62508]: DEBUG nova.virt.hardware [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1307.108091] env[62508]: DEBUG nova.virt.hardware [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1307.108732] env[62508]: DEBUG nova.virt.hardware [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1307.109553] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aed6731-c45f-4033-9ef6-97d7015476a2 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.119105] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd19b3a2-afa9-45e8-bbc8-10fb14622612 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.134614] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Instance VIF info [] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1307.147710] env[62508]: DEBUG oslo.service.loopingcall [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1307.148094] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1307.148281] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4fd45cb3-ac07-437e-9ee6-e22633c65d6f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.166769] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1307.166769] env[62508]: value = "task-1775372" [ 1307.166769] env[62508]: _type = "Task" [ 1307.166769] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.175103] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775372, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.193040] env[62508]: DEBUG nova.network.neutron [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Successfully updated port: a5f76bdc-0abb-4e34-b1bc-7711f90a4c29 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1307.238990] env[62508]: DEBUG oslo_vmware.api [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': task-1775371, 'name': PowerOffVM_Task, 'duration_secs': 0.307438} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.239595] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1307.239697] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1307.239904] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-54941652-3129-4b9a-af1d-dbce2cbb70b1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.313665] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1307.313915] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1307.314234] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Deleting the datastore file [datastore1] 7339c22a-05c9-4ddd-93df-0326cbe96ca4 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1307.314381] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bcbe4d22-3fa4-40e8-8641-bad21108e0e8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.321903] env[62508]: DEBUG oslo_vmware.api [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Waiting for the task: (returnval){ [ 1307.321903] env[62508]: value = "task-1775374" [ 1307.321903] env[62508]: _type = "Task" [ 1307.321903] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.330529] env[62508]: DEBUG oslo_vmware.api [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': task-1775374, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.337891] env[62508]: DEBUG nova.compute.manager [req-2d9e44bf-3248-4543-abe2-344c0b931927 req-7b440ab4-0de7-488f-bdbf-7829e383fcc0 service nova] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Received event network-vif-deleted-69b55107-fb30-4e96-ae5d-062af1ec4850 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1307.338097] env[62508]: INFO nova.compute.manager [req-2d9e44bf-3248-4543-abe2-344c0b931927 req-7b440ab4-0de7-488f-bdbf-7829e383fcc0 service nova] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Neutron deleted interface 69b55107-fb30-4e96-ae5d-062af1ec4850; detaching it from the instance and deleting it from the info cache [ 1307.338274] env[62508]: DEBUG nova.network.neutron [req-2d9e44bf-3248-4543-abe2-344c0b931927 req-7b440ab4-0de7-488f-bdbf-7829e383fcc0 service nova] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1307.340465] env[62508]: DEBUG nova.network.neutron [-] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1307.422541] env[62508]: DEBUG nova.compute.utils [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1307.422541] env[62508]: DEBUG nova.compute.manager [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1307.422541] env[62508]: DEBUG nova.network.neutron [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1307.527719] env[62508]: DEBUG nova.policy [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8c1c06958b6473a99b192ea353596d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62c36aa1e15f4bfc83e5a9e5ce22d7d5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1307.681691] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775372, 'name': CreateVM_Task, 'duration_secs': 0.342354} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.682106] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1307.683179] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1307.683525] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.684230] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1307.684766] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04144e3b-310e-4bdc-9224-b9e23e046209 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.693134] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Waiting for the task: (returnval){ [ 1307.693134] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525b95a4-c0b4-c4ac-93ec-6c9131af82cd" [ 1307.693134] env[62508]: _type = "Task" [ 1307.693134] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.701740] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Acquiring lock "refresh_cache-46a524e2-93b0-4726-812f-98e08b6ba0b4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1307.701740] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Acquired lock "refresh_cache-46a524e2-93b0-4726-812f-98e08b6ba0b4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.702348] env[62508]: DEBUG nova.network.neutron [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1307.702910] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525b95a4-c0b4-c4ac-93ec-6c9131af82cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.834951] env[62508]: DEBUG oslo_vmware.api [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Task: {'id': task-1775374, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.200187} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.835787] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1307.835787] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1307.835787] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1307.836012] env[62508]: INFO nova.compute.manager [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1307.836087] env[62508]: DEBUG oslo.service.loopingcall [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1307.836276] env[62508]: DEBUG nova.compute.manager [-] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1307.836370] env[62508]: DEBUG nova.network.neutron [-] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1307.840903] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fd98b9f6-4c58-411e-93fc-ef1e2195d4ce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.842886] env[62508]: INFO nova.compute.manager [-] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Took 1.37 seconds to deallocate network for instance. [ 1307.852284] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff51125-791f-4653-ad28-f89b05eaeef9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.883137] env[62508]: DEBUG nova.compute.manager [req-2d9e44bf-3248-4543-abe2-344c0b931927 req-7b440ab4-0de7-488f-bdbf-7829e383fcc0 service nova] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Detach interface failed, port_id=69b55107-fb30-4e96-ae5d-062af1ec4850, reason: Instance d32a3a5d-17d0-4a79-b76a-371cdd170ee0 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1307.925725] env[62508]: DEBUG nova.compute.manager [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1307.970214] env[62508]: DEBUG nova.compute.manager [req-212d6a26-f415-43a7-8b14-868507522450 req-31532912-ec93-49bf-88ce-5d0281424eaa service nova] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Received event network-vif-plugged-a5f76bdc-0abb-4e34-b1bc-7711f90a4c29 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1307.970214] env[62508]: DEBUG oslo_concurrency.lockutils [req-212d6a26-f415-43a7-8b14-868507522450 req-31532912-ec93-49bf-88ce-5d0281424eaa service nova] Acquiring lock "46a524e2-93b0-4726-812f-98e08b6ba0b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1307.970214] env[62508]: DEBUG oslo_concurrency.lockutils [req-212d6a26-f415-43a7-8b14-868507522450 req-31532912-ec93-49bf-88ce-5d0281424eaa service nova] Lock "46a524e2-93b0-4726-812f-98e08b6ba0b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1307.970214] env[62508]: DEBUG oslo_concurrency.lockutils [req-212d6a26-f415-43a7-8b14-868507522450 req-31532912-ec93-49bf-88ce-5d0281424eaa service nova] Lock "46a524e2-93b0-4726-812f-98e08b6ba0b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1307.971585] env[62508]: DEBUG nova.compute.manager [req-212d6a26-f415-43a7-8b14-868507522450 req-31532912-ec93-49bf-88ce-5d0281424eaa service nova] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] No waiting events found dispatching network-vif-plugged-a5f76bdc-0abb-4e34-b1bc-7711f90a4c29 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1307.971585] env[62508]: WARNING nova.compute.manager [req-212d6a26-f415-43a7-8b14-868507522450 req-31532912-ec93-49bf-88ce-5d0281424eaa service nova] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Received unexpected event network-vif-plugged-a5f76bdc-0abb-4e34-b1bc-7711f90a4c29 for instance with vm_state building and task_state spawning. [ 1307.971585] env[62508]: DEBUG nova.compute.manager [req-212d6a26-f415-43a7-8b14-868507522450 req-31532912-ec93-49bf-88ce-5d0281424eaa service nova] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Received event network-changed-a5f76bdc-0abb-4e34-b1bc-7711f90a4c29 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1307.971585] env[62508]: DEBUG nova.compute.manager [req-212d6a26-f415-43a7-8b14-868507522450 req-31532912-ec93-49bf-88ce-5d0281424eaa service nova] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Refreshing instance network info cache due to event network-changed-a5f76bdc-0abb-4e34-b1bc-7711f90a4c29. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1307.971585] env[62508]: DEBUG oslo_concurrency.lockutils [req-212d6a26-f415-43a7-8b14-868507522450 req-31532912-ec93-49bf-88ce-5d0281424eaa service nova] Acquiring lock "refresh_cache-46a524e2-93b0-4726-812f-98e08b6ba0b4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1308.130893] env[62508]: DEBUG nova.network.neutron [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Successfully created port: 740df9be-fad5-483e-a52a-61e4af3c5da8 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1308.206826] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525b95a4-c0b4-c4ac-93ec-6c9131af82cd, 'name': SearchDatastore_Task, 'duration_secs': 0.009419} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.207160] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1308.207367] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1308.207589] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1308.207727] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1308.207911] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1308.208781] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2fc62131-deb1-41ef-a304-5c86f9ee429d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1308.220440] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1308.220440] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1308.221485] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e651c034-7609-4b65-a3ed-359314014925 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.229051] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Waiting for the task: (returnval){ [ 1308.229051] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52900e18-07d1-eeeb-96a6-53d12f22c50a" [ 1308.229051] env[62508]: _type = "Task" [ 1308.229051] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.240690] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52900e18-07d1-eeeb-96a6-53d12f22c50a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.257461] env[62508]: DEBUG nova.network.neutron [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1308.332855] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b87895e9-e2ed-4205-be27-b2cf52b7b185 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.341846] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b9d5118-a876-489b-a398-d856f8da5f4b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.375190] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1308.378640] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6ecbb3-0f07-4ecf-8473-75b6b4378124 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.386907] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f01fca-952b-4648-b262-82b76a2c93d2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.402036] env[62508]: DEBUG nova.compute.provider_tree [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1308.489951] env[62508]: DEBUG nova.network.neutron [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Updating instance_info_cache with network_info: [{"id": "a5f76bdc-0abb-4e34-b1bc-7711f90a4c29", "address": "fa:16:3e:d0:87:57", "network": {"id": "9ec37d22-b588-4a60-ad24-d49792e54a1d", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-372505117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f69e4f878f3d4174aaf6f125682087fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3db2ab9e-1244-4377-b05f-ab76003f2428", "external-id": "nsx-vlan-transportzone-199", "segmentation_id": 199, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5f76bdc-0a", "ovs_interfaceid": "a5f76bdc-0abb-4e34-b1bc-7711f90a4c29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1308.636575] env[62508]: DEBUG nova.network.neutron [-] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1308.742208] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52900e18-07d1-eeeb-96a6-53d12f22c50a, 'name': SearchDatastore_Task, 'duration_secs': 0.022921} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.743160] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b866a0f4-2adc-4c45-835f-32fe21ec2ed5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.748480] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Waiting for the task: (returnval){ [ 1308.748480] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5232affd-4864-8377-b98e-b5c727b7a1c2" [ 1308.748480] env[62508]: _type = "Task" [ 1308.748480] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.756606] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5232affd-4864-8377-b98e-b5c727b7a1c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.927585] env[62508]: ERROR nova.scheduler.client.report [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [req-fb5a7718-247a-42b9-af95-8657a76a2520] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fb5a7718-247a-42b9-af95-8657a76a2520"}]} [ 1308.937310] env[62508]: DEBUG nova.compute.manager [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1308.953282] env[62508]: DEBUG nova.scheduler.client.report [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1308.968908] env[62508]: DEBUG nova.scheduler.client.report [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1308.968908] env[62508]: DEBUG nova.compute.provider_tree [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1308.984420] env[62508]: DEBUG nova.virt.hardware [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1308.984866] env[62508]: DEBUG nova.virt.hardware [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1308.985221] env[62508]: DEBUG nova.virt.hardware [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1308.988419] env[62508]: DEBUG nova.virt.hardware [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1308.988419] env[62508]: DEBUG nova.virt.hardware [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1308.988419] env[62508]: DEBUG nova.virt.hardware [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1308.988419] env[62508]: DEBUG nova.virt.hardware [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1308.988419] env[62508]: DEBUG nova.virt.hardware [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1308.988612] env[62508]: DEBUG nova.virt.hardware [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1308.988612] env[62508]: DEBUG nova.virt.hardware [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] 
{{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1308.988612] env[62508]: DEBUG nova.virt.hardware [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1308.988612] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de7db77-f1bb-485b-9d1b-5fb904cf8e67 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.993127] env[62508]: DEBUG nova.scheduler.client.report [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1308.998303] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Releasing lock "refresh_cache-46a524e2-93b0-4726-812f-98e08b6ba0b4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1308.998461] env[62508]: DEBUG nova.compute.manager [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Instance network_info: |[{"id": "a5f76bdc-0abb-4e34-b1bc-7711f90a4c29", "address": "fa:16:3e:d0:87:57", "network": {"id": "9ec37d22-b588-4a60-ad24-d49792e54a1d", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-372505117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f69e4f878f3d4174aaf6f125682087fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3db2ab9e-1244-4377-b05f-ab76003f2428", "external-id": "nsx-vlan-transportzone-199", "segmentation_id": 199, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5f76bdc-0a", "ovs_interfaceid": "a5f76bdc-0abb-4e34-b1bc-7711f90a4c29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1308.999320] env[62508]: DEBUG oslo_concurrency.lockutils [req-212d6a26-f415-43a7-8b14-868507522450 req-31532912-ec93-49bf-88ce-5d0281424eaa service nova] Acquired lock "refresh_cache-46a524e2-93b0-4726-812f-98e08b6ba0b4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1308.999654] env[62508]: DEBUG nova.network.neutron [req-212d6a26-f415-43a7-8b14-868507522450 req-31532912-ec93-49bf-88ce-5d0281424eaa service nova] 
[instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Refreshing network info cache for port a5f76bdc-0abb-4e34-b1bc-7711f90a4c29 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1309.000975] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:87:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3db2ab9e-1244-4377-b05f-ab76003f2428', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a5f76bdc-0abb-4e34-b1bc-7711f90a4c29', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1309.013654] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Creating folder: Project (f69e4f878f3d4174aaf6f125682087fb). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1309.021863] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8104c330-a893-4d0f-a590-a818aade97fc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.024546] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4951ed7-d91c-4d8c-9244-19187f7d9c68 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.032256] env[62508]: DEBUG nova.scheduler.client.report [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1309.044979] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Created folder: Project (f69e4f878f3d4174aaf6f125682087fb) in parent group-v368536. [ 1309.045140] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Creating folder: Instances. Parent ref: group-v368569. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1309.045348] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b07acaf7-8e8e-4ada-9c64-2cea5cdbe4b9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.056859] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Created folder: Instances in parent group-v368569. 
[ 1309.057256] env[62508]: DEBUG oslo.service.loopingcall [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1309.057339] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1309.057523] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eec1d897-091f-4c39-b7c7-cf206ca911d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.082160] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1309.082160] env[62508]: value = "task-1775377" [ 1309.082160] env[62508]: _type = "Task" [ 1309.082160] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.091484] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775377, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.140325] env[62508]: INFO nova.compute.manager [-] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Took 1.30 seconds to deallocate network for instance. [ 1309.261317] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5232affd-4864-8377-b98e-b5c727b7a1c2, 'name': SearchDatastore_Task, 'duration_secs': 0.011874} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.261317] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1309.261616] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b/96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1309.261692] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b369c78-0930-47a6-abae-3d6410d170af {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.268266] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Waiting for the task: (returnval){ [ 1309.268266] env[62508]: value = "task-1775378" [ 1309.268266] env[62508]: _type = "Task" [ 1309.268266] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.276924] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': task-1775378, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.459158] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a4441e-257d-402d-9fda-212e8ab2e20b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.469906] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-421c4f11-683a-4756-bd0a-53b75991b369 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.504429] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ddd12e-b2ab-4c9f-92db-609876d5f255 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.516381] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0888022-196d-4488-a3c3-5f4af89fe437 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.532487] env[62508]: DEBUG nova.compute.provider_tree [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1309.594200] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775377, 'name': CreateVM_Task, 'duration_secs': 0.444359} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.594376] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1309.595098] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1309.595217] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1309.595522] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1309.595785] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f272586-b757-458e-b679-3f8ef5fb5c29 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.601891] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Waiting for the task: (returnval){ [ 1309.601891] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5239ae6c-7cfb-816f-4c25-9279a098d874" [ 1309.601891] env[62508]: _type = "Task" [ 1309.601891] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.611873] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5239ae6c-7cfb-816f-4c25-9279a098d874, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.657609] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1309.781364] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': task-1775378, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.095720] env[62508]: DEBUG nova.scheduler.client.report [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 33 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1310.096019] env[62508]: DEBUG nova.compute.provider_tree [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 33 to 34 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1310.096678] env[62508]: DEBUG nova.compute.provider_tree [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1310.113501] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5239ae6c-7cfb-816f-4c25-9279a098d874, 'name': SearchDatastore_Task, 'duration_secs': 0.063712} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.114274] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1310.114274] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1310.114432] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1310.114510] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.114690] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1310.114963] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-05d50318-edeb-4cbe-af1d-3ce332caace3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.124343] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1310.124343] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1310.125086] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4dd388f-9fd8-438f-a438-ff4dc3ea4e2d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.131435] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Waiting for the task: (returnval){ [ 1310.131435] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c56028-155c-94b0-44c9-8c436191d18c" [ 1310.131435] env[62508]: _type = "Task" [ 1310.131435] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.140428] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c56028-155c-94b0-44c9-8c436191d18c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.192413] env[62508]: DEBUG nova.network.neutron [req-212d6a26-f415-43a7-8b14-868507522450 req-31532912-ec93-49bf-88ce-5d0281424eaa service nova] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Updated VIF entry in instance network info cache for port a5f76bdc-0abb-4e34-b1bc-7711f90a4c29. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1310.192806] env[62508]: DEBUG nova.network.neutron [req-212d6a26-f415-43a7-8b14-868507522450 req-31532912-ec93-49bf-88ce-5d0281424eaa service nova] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Updating instance_info_cache with network_info: [{"id": "a5f76bdc-0abb-4e34-b1bc-7711f90a4c29", "address": "fa:16:3e:d0:87:57", "network": {"id": "9ec37d22-b588-4a60-ad24-d49792e54a1d", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-372505117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f69e4f878f3d4174aaf6f125682087fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3db2ab9e-1244-4377-b05f-ab76003f2428", "external-id": "nsx-vlan-transportzone-199", "segmentation_id": 199, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5f76bdc-0a", "ovs_interfaceid": "a5f76bdc-0abb-4e34-b1bc-7711f90a4c29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1310.285690] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': task-1775378, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.664561} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.286532] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b/96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1310.287654] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1310.287654] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-135db0ff-3762-4772-a5ed-21189498b280 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.296909] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Waiting for the task: (returnval){ [ 1310.296909] env[62508]: value = "task-1775379" [ 1310.296909] env[62508]: _type = "Task" [ 1310.296909] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.307422] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': task-1775379, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.606138] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.695s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1310.606138] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1310.607840] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.749s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1310.608438] env[62508]: DEBUG nova.objects.instance [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Lazy-loading 'resources' on Instance uuid fb7519c4-0254-4831-81f3-0eed14844f2d {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1310.643553] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c56028-155c-94b0-44c9-8c436191d18c, 'name': SearchDatastore_Task, 'duration_secs': 0.012397} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.644715] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-737dcc24-9dc2-444b-90f6-e2cfa1c9c56b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.651981] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Waiting for the task: (returnval){ [ 1310.651981] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5260c49c-82dd-deed-52d5-88f76a634b91" [ 1310.651981] env[62508]: _type = "Task" [ 1310.651981] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.662775] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5260c49c-82dd-deed-52d5-88f76a634b91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.696662] env[62508]: DEBUG oslo_concurrency.lockutils [req-212d6a26-f415-43a7-8b14-868507522450 req-31532912-ec93-49bf-88ce-5d0281424eaa service nova] Releasing lock "refresh_cache-46a524e2-93b0-4726-812f-98e08b6ba0b4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1310.811053] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': task-1775379, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076151} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.811053] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1310.811053] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dce8db08-fce9-4c96-b01f-5530f511941d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.846718] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b/96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1310.846718] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-762d3790-d053-494c-9889-35be1235cb0c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.871801] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Waiting for the task: (returnval){ [ 1310.871801] env[62508]: value = "task-1775380" [ 1310.871801] env[62508]: _type = "Task" [ 1310.871801] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.881293] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': task-1775380, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.055745] env[62508]: DEBUG nova.network.neutron [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Successfully updated port: 740df9be-fad5-483e-a52a-61e4af3c5da8 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1311.113681] env[62508]: DEBUG nova.compute.utils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1311.115354] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1311.115449] env[62508]: DEBUG nova.network.neutron [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1311.168738] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5260c49c-82dd-deed-52d5-88f76a634b91, 'name': SearchDatastore_Task, 'duration_secs': 0.011118} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.169761] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1311.169761] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 46a524e2-93b0-4726-812f-98e08b6ba0b4/46a524e2-93b0-4726-812f-98e08b6ba0b4.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1311.169761] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7017a46b-74ad-4aa1-87a8-db222f9e265f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.184778] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Waiting for the task: (returnval){ [ 1311.184778] env[62508]: value = "task-1775381" [ 1311.184778] env[62508]: _type = "Task" [ 1311.184778] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.201036] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': task-1775381, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.221724] env[62508]: DEBUG nova.policy [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '49543399d8054c18bb5e8b7e843c2e52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9030159a6b094d47ad55301d5cb6c5b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1311.229941] env[62508]: DEBUG nova.compute.manager [req-5f2dd8bd-72ef-44ed-a398-68143f2e64f7 req-0101a509-73d4-4d89-b834-450b837afbbb service nova] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Received event network-vif-deleted-173aba5f-fabb-4f48-899e-9fc4716084fa {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1311.386609] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': task-1775380, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.565853] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquiring lock "refresh_cache-e2d4c71b-1164-4c7d-9ffb-7f5489f92d32" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1311.566088] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquired lock "refresh_cache-e2d4c71b-1164-4c7d-9ffb-7f5489f92d32" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.569465] env[62508]: DEBUG nova.network.neutron [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1311.626063] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1311.630677] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf505a1-6172-4eef-808b-7bb4900001c2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.641066] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a1f7a5-3094-4796-bc69-3284ac3aeda7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.703304] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce95016-67ea-4e3b-9480-bd127a299a90 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.722758] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c6197e-a98c-4bcf-b0e0-95c8969e4918 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.733086] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': task-1775381, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.750047] env[62508]: DEBUG nova.compute.provider_tree [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1311.871629] env[62508]: DEBUG nova.compute.manager [req-dabfc118-c833-4fce-b0da-8075a202365d req-56d16e37-b485-4b71-9a5a-f36ada4b9ebf service nova] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Received event network-vif-plugged-740df9be-fad5-483e-a52a-61e4af3c5da8 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1311.871859] env[62508]: DEBUG oslo_concurrency.lockutils [req-dabfc118-c833-4fce-b0da-8075a202365d req-56d16e37-b485-4b71-9a5a-f36ada4b9ebf service nova] Acquiring lock "e2d4c71b-1164-4c7d-9ffb-7f5489f92d32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1311.872481] env[62508]: DEBUG oslo_concurrency.lockutils [req-dabfc118-c833-4fce-b0da-8075a202365d req-56d16e37-b485-4b71-9a5a-f36ada4b9ebf service nova] Lock "e2d4c71b-1164-4c7d-9ffb-7f5489f92d32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1311.872722] env[62508]: DEBUG oslo_concurrency.lockutils 
[req-dabfc118-c833-4fce-b0da-8075a202365d req-56d16e37-b485-4b71-9a5a-f36ada4b9ebf service nova] Lock "e2d4c71b-1164-4c7d-9ffb-7f5489f92d32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1311.872920] env[62508]: DEBUG nova.compute.manager [req-dabfc118-c833-4fce-b0da-8075a202365d req-56d16e37-b485-4b71-9a5a-f36ada4b9ebf service nova] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] No waiting events found dispatching network-vif-plugged-740df9be-fad5-483e-a52a-61e4af3c5da8 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1311.873120] env[62508]: WARNING nova.compute.manager [req-dabfc118-c833-4fce-b0da-8075a202365d req-56d16e37-b485-4b71-9a5a-f36ada4b9ebf service nova] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Received unexpected event network-vif-plugged-740df9be-fad5-483e-a52a-61e4af3c5da8 for instance with vm_state building and task_state spawning. [ 1311.883731] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': task-1775380, 'name': ReconfigVM_Task, 'duration_secs': 0.690854} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.884024] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b/96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1311.884635] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1b164fe3-275a-496b-b7f0-57461b56af04 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.891166] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Waiting for the task: (returnval){ [ 1311.891166] env[62508]: value = "task-1775382" [ 1311.891166] env[62508]: _type = "Task" [ 1311.891166] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.901049] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': task-1775382, 'name': Rename_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.023725] env[62508]: DEBUG nova.network.neutron [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Successfully created port: a4972a5c-7644-49db-a0bb-51bc62415b49 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1312.123844] env[62508]: DEBUG nova.network.neutron [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1312.209257] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': task-1775381, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.303769] env[62508]: DEBUG nova.scheduler.client.report [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 34 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1312.304154] env[62508]: DEBUG nova.compute.provider_tree [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 34 to 35 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1312.304416] env[62508]: DEBUG nova.compute.provider_tree [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1312.390638] env[62508]: DEBUG nova.network.neutron [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Updating instance_info_cache with network_info: 
[{"id": "740df9be-fad5-483e-a52a-61e4af3c5da8", "address": "fa:16:3e:14:cb:a2", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.18", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap740df9be-fa", "ovs_interfaceid": "740df9be-fad5-483e-a52a-61e4af3c5da8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1312.402995] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': task-1775382, 'name': Rename_Task, 'duration_secs': 0.199159} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.407020] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1312.407020] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a46d284b-2d4f-4fc3-b1c0-0caaec97977f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.411521] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Waiting for the task: (returnval){ [ 1312.411521] env[62508]: value = "task-1775383" [ 1312.411521] env[62508]: _type = "Task" [ 1312.411521] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.421292] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': task-1775383, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.643295] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1312.679256] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1312.679577] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1312.681418] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1312.681682] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1312.682230] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1312.682464] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1312.682757] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1312.682973] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1312.683204] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1312.684130] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1312.684130] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1312.685092] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dec3f1d-2e51-4b2f-97b8-45745c01e196 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.696462] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca74f04-91a0-4a47-bdea-0afc42732cf6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.715832] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': task-1775381, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.813685] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.206s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.822217] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.461s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.822217] env[62508]: INFO nova.compute.claims [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1312.860115] env[62508]: INFO nova.scheduler.client.report [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Deleted allocations for instance fb7519c4-0254-4831-81f3-0eed14844f2d [ 1312.896535] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Releasing lock "refresh_cache-e2d4c71b-1164-4c7d-9ffb-7f5489f92d32" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1312.896862] env[62508]: DEBUG nova.compute.manager [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Instance network_info: |[{"id": "740df9be-fad5-483e-a52a-61e4af3c5da8", "address": "fa:16:3e:14:cb:a2", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.18", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap740df9be-fa", "ovs_interfaceid": "740df9be-fad5-483e-a52a-61e4af3c5da8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1312.897903] env[62508]: DEBUG nova.virt.vmwareapi.vmops 
[None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:cb:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '740df9be-fad5-483e-a52a-61e4af3c5da8', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1312.906491] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Creating folder: Project (62c36aa1e15f4bfc83e5a9e5ce22d7d5). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1312.907109] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5fe3237f-f22c-4d75-85fa-b7e5c17c6739 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.918221] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Created folder: Project (62c36aa1e15f4bfc83e5a9e5ce22d7d5) in parent group-v368536. [ 1312.918620] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Creating folder: Instances. Parent ref: group-v368572. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1312.919097] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f45fd397-41c6-4fd8-bf12-ca9356cd8f65 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.924475] env[62508]: DEBUG oslo_vmware.api [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Task: {'id': task-1775383, 'name': PowerOnVM_Task, 'duration_secs': 0.488855} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.924788] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1312.924926] env[62508]: DEBUG nova.compute.manager [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1312.926527] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd88b78-706c-4034-bffe-9cd5807369a1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.931434] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Created folder: Instances in parent group-v368572. [ 1312.932852] env[62508]: DEBUG oslo.service.loopingcall [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1312.933142] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1312.937371] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72f408ee-4ff9-4e1b-b200-7cf668237cff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.960310] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1312.960310] env[62508]: value = "task-1775386" [ 1312.960310] env[62508]: _type = "Task" [ 1312.960310] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.974865] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775386, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.210842] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': task-1775381, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.735221} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.211364] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 46a524e2-93b0-4726-812f-98e08b6ba0b4/46a524e2-93b0-4726-812f-98e08b6ba0b4.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1313.211364] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1313.211828] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e6718d08-8e25-4a4e-82a6-138661e79164 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.219726] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Waiting for the task: (returnval){ [ 1313.219726] env[62508]: value = "task-1775387" [ 1313.219726] env[62508]: _type = "Task" [ 1313.219726] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.230486] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': task-1775387, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.374369] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bb9b1548-d30d-4d1e-95a2-1445adb2cd14 tempest-DeleteServersAdminTestJSON-2085453367 tempest-DeleteServersAdminTestJSON-2085453367-project-admin] Lock "fb7519c4-0254-4831-81f3-0eed14844f2d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.087s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1313.469179] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1313.479130] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775386, 'name': CreateVM_Task, 'duration_secs': 0.38449} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.479130] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1313.479130] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1313.479130] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1313.479423] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1313.479423] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b03b2a9-6174-4354-baee-34675734e9a7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.486138] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for the task: (returnval){ [ 1313.486138] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521ea378-9e5d-6fb8-fc70-38e5fe784733" [ 1313.486138] env[62508]: _type = "Task" [ 1313.486138] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.493609] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521ea378-9e5d-6fb8-fc70-38e5fe784733, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.733677] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': task-1775387, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096934} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.734277] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1313.734869] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e16e17f-e9a6-416b-86e4-58d61be9bbbb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.759598] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 46a524e2-93b0-4726-812f-98e08b6ba0b4/46a524e2-93b0-4726-812f-98e08b6ba0b4.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1313.760335] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4653d4d2-8808-4ef7-867a-3fa006bacb2e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.782702] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Waiting for the task: (returnval){ [ 1313.782702] env[62508]: value = "task-1775388" [ 1313.782702] env[62508]: _type = "Task" [ 1313.782702] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.791689] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': task-1775388, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.986365] env[62508]: DEBUG nova.network.neutron [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Successfully updated port: a4972a5c-7644-49db-a0bb-51bc62415b49 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1314.002640] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521ea378-9e5d-6fb8-fc70-38e5fe784733, 'name': SearchDatastore_Task, 'duration_secs': 0.01013} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.006215] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1314.006554] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1314.006790] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.006875] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.007145] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1314.008847] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-505b30a8-b6af-4756-b819-6bd5d36d967c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.019186] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1314.019412] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1314.020521] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee98088c-e355-4b47-a3f9-ee3ae96e308b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.029937] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for the task: (returnval){ [ 1314.029937] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520d13db-91ce-4770-6640-759a24937391" [ 1314.029937] env[62508]: _type = "Task" [ 1314.029937] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.040513] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520d13db-91ce-4770-6640-759a24937391, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.298349] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': task-1775388, 'name': ReconfigVM_Task, 'duration_secs': 0.281378} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.298349] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 46a524e2-93b0-4726-812f-98e08b6ba0b4/46a524e2-93b0-4726-812f-98e08b6ba0b4.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1314.298349] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e0943e73-aae1-41ab-a59d-939dc486f548 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.299894] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d88ece3-aa33-4278-aa98-19b394d5716e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.309035] env[62508]: DEBUG oslo_concurrency.lockutils [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Acquiring lock "96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1314.309035] env[62508]: DEBUG oslo_concurrency.lockutils [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Lock "96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1314.309035] env[62508]: DEBUG oslo_concurrency.lockutils [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Acquiring lock "96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1314.309035] env[62508]: DEBUG oslo_concurrency.lockutils [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Lock "96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1314.309498] env[62508]: DEBUG oslo_concurrency.lockutils [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Lock "96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1314.316008] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Waiting for the task: (returnval){ [ 1314.316008] env[62508]: value = "task-1775389" [ 1314.316008] env[62508]: _type = "Task" [ 1314.316008] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.316755] env[62508]: INFO nova.compute.manager [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Terminating instance [ 1314.319012] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6edbba6-0fc3-4a96-b6c9-606a9e4d4bee {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.322537] env[62508]: DEBUG oslo_concurrency.lockutils [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Acquiring lock "refresh_cache-96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.322696] env[62508]: DEBUG oslo_concurrency.lockutils [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Acquired lock "refresh_cache-96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.322860] env[62508]: DEBUG nova.network.neutron [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1314.366589] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': task-1775389, 'name': Rename_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.368974] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f524c4ca-21f1-4207-9dd1-22fdd1b9aaa1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.378239] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Acquiring lock "2f7b7109-0ced-4ea4-8dde-608655f2b3ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1314.378544] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Lock "2f7b7109-0ced-4ea4-8dde-608655f2b3ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1314.384400] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf9764d-eabc-4068-84b2-0cecc6157200 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.400504] env[62508]: DEBUG nova.compute.provider_tree [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1314.496908] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "refresh_cache-575ea3dc-850d-4078-8678-41b3c40a4c27" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.497045] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquired lock "refresh_cache-575ea3dc-850d-4078-8678-41b3c40a4c27" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.497202] env[62508]: DEBUG nova.network.neutron [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1314.541049] env[62508]: DEBUG oslo_vmware.api 
[None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520d13db-91ce-4770-6640-759a24937391, 'name': SearchDatastore_Task, 'duration_secs': 0.009495} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.541807] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdb1fdf7-6173-449b-9dda-3b775cfabd19 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.546934] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for the task: (returnval){ [ 1314.546934] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c47c03-ce7f-39f0-9d28-cbddf27018f8" [ 1314.546934] env[62508]: _type = "Task" [ 1314.546934] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.554700] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c47c03-ce7f-39f0-9d28-cbddf27018f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.649210] env[62508]: DEBUG nova.compute.manager [req-de0d8681-da8d-4052-81f8-a1e7ec70d652 req-243db176-acca-428c-947f-e3538b073efd service nova] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Received event network-changed-740df9be-fad5-483e-a52a-61e4af3c5da8 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1314.649210] env[62508]: DEBUG nova.compute.manager [req-de0d8681-da8d-4052-81f8-a1e7ec70d652 req-243db176-acca-428c-947f-e3538b073efd service nova] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Refreshing instance network info cache due to event network-changed-740df9be-fad5-483e-a52a-61e4af3c5da8. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1314.649395] env[62508]: DEBUG oslo_concurrency.lockutils [req-de0d8681-da8d-4052-81f8-a1e7ec70d652 req-243db176-acca-428c-947f-e3538b073efd service nova] Acquiring lock "refresh_cache-e2d4c71b-1164-4c7d-9ffb-7f5489f92d32" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.649537] env[62508]: DEBUG oslo_concurrency.lockutils [req-de0d8681-da8d-4052-81f8-a1e7ec70d652 req-243db176-acca-428c-947f-e3538b073efd service nova] Acquired lock "refresh_cache-e2d4c71b-1164-4c7d-9ffb-7f5489f92d32" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.649694] env[62508]: DEBUG nova.network.neutron [req-de0d8681-da8d-4052-81f8-a1e7ec70d652 req-243db176-acca-428c-947f-e3538b073efd service nova] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Refreshing network info cache for port 740df9be-fad5-483e-a52a-61e4af3c5da8 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1314.831985] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': task-1775389, 'name': Rename_Task, 'duration_secs': 0.135576} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.832510] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1314.833070] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec9409c6-44e6-4b25-8da9-5a7f6bc94e63 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.842056] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Waiting for the task: (returnval){ [ 1314.842056] env[62508]: value = "task-1775390" [ 1314.842056] env[62508]: _type = "Task" [ 1314.842056] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.848687] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': task-1775390, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.850685] env[62508]: DEBUG nova.network.neutron [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1314.924354] env[62508]: ERROR nova.scheduler.client.report [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [req-6d5c612a-ab69-4e37-81c0-57426118a90e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6d5c612a-ab69-4e37-81c0-57426118a90e"}]} [ 1314.947411] env[62508]: DEBUG nova.scheduler.client.report [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1314.961797] env[62508]: DEBUG nova.scheduler.client.report [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1314.961933] env[62508]: DEBUG nova.compute.provider_tree [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1314.975963] env[62508]: DEBUG nova.scheduler.client.report [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1314.984363] env[62508]: DEBUG nova.network.neutron [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b 
tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.994690] env[62508]: DEBUG nova.scheduler.client.report [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1315.053258] env[62508]: DEBUG nova.network.neutron [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1315.067920] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c47c03-ce7f-39f0-9d28-cbddf27018f8, 'name': SearchDatastore_Task, 'duration_secs': 0.009539} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.070435] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1315.071067] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] e2d4c71b-1164-4c7d-9ffb-7f5489f92d32/e2d4c71b-1164-4c7d-9ffb-7f5489f92d32.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1315.072482] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-098cf849-147e-4980-8b36-9716611d31d8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.084362] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for the task: (returnval){ [ 1315.084362] env[62508]: value = "task-1775391" [ 1315.084362] env[62508]: _type = "Task" [ 1315.084362] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.097175] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775391, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.359360] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': task-1775390, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.384731] env[62508]: DEBUG nova.network.neutron [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Updating instance_info_cache with network_info: [{"id": "a4972a5c-7644-49db-a0bb-51bc62415b49", "address": "fa:16:3e:d7:bd:c3", "network": {"id": "b9b83341-56f3-4400-b7ee-bceb6e21fc39", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1632679867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9030159a6b094d47ad55301d5cb6c5b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4972a5c-76", "ovs_interfaceid": "a4972a5c-7644-49db-a0bb-51bc62415b49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1315.397403] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquiring lock "aae3b4a3-c954-4f73-bd12-9b19a675179c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.397403] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Lock "aae3b4a3-c954-4f73-bd12-9b19a675179c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1315.464240] env[62508]: DEBUG nova.compute.manager [None req-34b8c817-a085-4f67-be5e-d1b233c7fb80 tempest-ServerDiagnosticsV248Test-821006467 
tempest-ServerDiagnosticsV248Test-821006467-project-admin] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1315.465072] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05759c56-e446-4bd4-ae71-5268710bc86b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.472310] env[62508]: INFO nova.compute.manager [None req-34b8c817-a085-4f67-be5e-d1b233c7fb80 tempest-ServerDiagnosticsV248Test-821006467 tempest-ServerDiagnosticsV248Test-821006467-project-admin] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Retrieving diagnostics [ 1315.473392] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb01b03-3fa1-4b3b-bb27-0a554a238a31 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.512325] env[62508]: DEBUG oslo_concurrency.lockutils [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Releasing lock "refresh_cache-96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1315.512834] env[62508]: DEBUG nova.compute.manager [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1315.512985] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1315.518432] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f1542c3-115d-4ba9-958f-64f6078dac8e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.522065] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f716d7f-395a-42c2-a7f7-1209ccde56ff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.530056] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1315.534820] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-54fef846-3d7a-43f8-9e83-d1034edc5ef0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.537391] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a171bc88-ab5d-461b-b792-4d2d1a15ad38 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1315.546822] env[62508]: DEBUG oslo_vmware.api [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1315.546822] env[62508]: value = "task-1775392" [ 1315.546822] env[62508]: _type = "Task" [ 1315.546822] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.578807] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abc17647-0142-49a2-b71a-c582558be545 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.592535] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c5e7de-5fa4-4865-bbfb-020803186248 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.596431] env[62508]: DEBUG oslo_vmware.api [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775392, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.603777] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775391, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495021} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.611642] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] e2d4c71b-1164-4c7d-9ffb-7f5489f92d32/e2d4c71b-1164-4c7d-9ffb-7f5489f92d32.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1315.611870] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1315.612447] env[62508]: DEBUG nova.compute.provider_tree [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1315.613636] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-174833ca-e7bd-46a2-892e-68056817102f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.620501] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 
tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for the task: (returnval){ [ 1315.620501] env[62508]: value = "task-1775393" [ 1315.620501] env[62508]: _type = "Task" [ 1315.620501] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.630560] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775393, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.758273] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquiring lock "ce74cbd8-b709-418b-a206-f51975fd0af1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.758273] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Lock "ce74cbd8-b709-418b-a206-f51975fd0af1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1315.770365] env[62508]: DEBUG nova.network.neutron [req-de0d8681-da8d-4052-81f8-a1e7ec70d652 req-243db176-acca-428c-947f-e3538b073efd service nova] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Updated VIF entry in instance network info cache for port 740df9be-fad5-483e-a52a-61e4af3c5da8. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1315.770365] env[62508]: DEBUG nova.network.neutron [req-de0d8681-da8d-4052-81f8-a1e7ec70d652 req-243db176-acca-428c-947f-e3538b073efd service nova] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Updating instance_info_cache with network_info: [{"id": "740df9be-fad5-483e-a52a-61e4af3c5da8", "address": "fa:16:3e:14:cb:a2", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.18", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap740df9be-fa", "ovs_interfaceid": "740df9be-fad5-483e-a52a-61e4af3c5da8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1315.854584] env[62508]: DEBUG oslo_vmware.api [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': task-1775390, 'name': PowerOnVM_Task, 'duration_secs': 0.540855} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.854969] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1315.855184] env[62508]: INFO nova.compute.manager [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Took 9.46 seconds to spawn the instance on the hypervisor. 
[ 1315.855239] env[62508]: DEBUG nova.compute.manager [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1315.855982] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de2491d1-9a67-496f-b051-d4b78e272e1a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.888644] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Releasing lock "refresh_cache-575ea3dc-850d-4078-8678-41b3c40a4c27" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1315.889739] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Instance network_info: |[{"id": "a4972a5c-7644-49db-a0bb-51bc62415b49", "address": "fa:16:3e:d7:bd:c3", "network": {"id": "b9b83341-56f3-4400-b7ee-bceb6e21fc39", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1632679867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9030159a6b094d47ad55301d5cb6c5b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4972a5c-76", "ovs_interfaceid": "a4972a5c-7644-49db-a0bb-51bc62415b49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1315.889903] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:bd:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ed4797-90ad-44cd-bbcb-e90b2a8400f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4972a5c-7644-49db-a0bb-51bc62415b49', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1315.898268] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Creating folder: Project (9030159a6b094d47ad55301d5cb6c5b9). Parent ref: group-v368536. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1315.898268] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-486554c1-1a94-4295-9196-369c45526501 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.908784] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Created folder: Project (9030159a6b094d47ad55301d5cb6c5b9) in parent group-v368536. [ 1315.909113] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Creating folder: Instances. Parent ref: group-v368575. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1315.910026] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4817dc2b-2399-407a-a17e-c8f6b775a002 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.918616] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Created folder: Instances in parent group-v368575. [ 1315.919096] env[62508]: DEBUG oslo.service.loopingcall [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1315.919096] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1315.919262] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21165ca4-15be-408d-b9b1-37bc95465aa5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.938952] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1315.938952] env[62508]: value = "task-1775396" [ 1315.938952] env[62508]: _type = "Task" [ 1315.938952] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.947663] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775396, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.087635] env[62508]: DEBUG oslo_vmware.api [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775392, 'name': PowerOffVM_Task, 'duration_secs': 0.273732} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.088722] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1316.089127] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1316.091647] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10f567e7-c900-4988-a935-69c37002720d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.114146] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1316.115557] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1316.115834] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Deleting the datastore file [datastore1] 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1316.116165] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a42ff6f-cd17-46f0-9743-a6b43a54a73a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.120292] env[62508]: DEBUG nova.scheduler.client.report [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1316.130124] env[62508]: DEBUG oslo_vmware.api [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for the task: (returnval){ [ 1316.130124] env[62508]: value = "task-1775398" [ 1316.130124] env[62508]: _type = "Task" [ 1316.130124] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.137304] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775393, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070916} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.138024] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1316.139760] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c7866f-4646-4bae-ab5a-976d180c3958 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.147433] env[62508]: DEBUG oslo_vmware.api [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775398, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.172370] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] e2d4c71b-1164-4c7d-9ffb-7f5489f92d32/e2d4c71b-1164-4c7d-9ffb-7f5489f92d32.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1316.172370] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc76a5c6-ccd9-4842-98aa-baf2f2ed1067 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.196510] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for the task: (returnval){ [ 1316.196510] env[62508]: value = "task-1775399" [ 1316.196510] env[62508]: _type = "Task" [ 1316.196510] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.206207] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775399, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.274469] env[62508]: DEBUG oslo_concurrency.lockutils [req-de0d8681-da8d-4052-81f8-a1e7ec70d652 req-243db176-acca-428c-947f-e3538b073efd service nova] Releasing lock "refresh_cache-e2d4c71b-1164-4c7d-9ffb-7f5489f92d32" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1316.274729] env[62508]: DEBUG nova.compute.manager [req-de0d8681-da8d-4052-81f8-a1e7ec70d652 req-243db176-acca-428c-947f-e3538b073efd service nova] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Received event network-vif-plugged-a4972a5c-7644-49db-a0bb-51bc62415b49 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1316.274930] env[62508]: DEBUG oslo_concurrency.lockutils [req-de0d8681-da8d-4052-81f8-a1e7ec70d652 req-243db176-acca-428c-947f-e3538b073efd service nova] Acquiring lock "575ea3dc-850d-4078-8678-41b3c40a4c27-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1316.275159] env[62508]: DEBUG oslo_concurrency.lockutils [req-de0d8681-da8d-4052-81f8-a1e7ec70d652 req-243db176-acca-428c-947f-e3538b073efd service nova] Lock "575ea3dc-850d-4078-8678-41b3c40a4c27-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1316.275578] env[62508]: DEBUG oslo_concurrency.lockutils [req-de0d8681-da8d-4052-81f8-a1e7ec70d652 req-243db176-acca-428c-947f-e3538b073efd service nova] Lock "575ea3dc-850d-4078-8678-41b3c40a4c27-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.275578] env[62508]: DEBUG nova.compute.manager [req-de0d8681-da8d-4052-81f8-a1e7ec70d652 req-243db176-acca-428c-947f-e3538b073efd service nova] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] No waiting events found dispatching network-vif-plugged-a4972a5c-7644-49db-a0bb-51bc62415b49 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1316.276087] env[62508]: WARNING nova.compute.manager [req-de0d8681-da8d-4052-81f8-a1e7ec70d652 req-243db176-acca-428c-947f-e3538b073efd service nova] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Received unexpected event network-vif-plugged-a4972a5c-7644-49db-a0bb-51bc62415b49 for instance with vm_state building and task_state spawning. [ 1316.376282] env[62508]: INFO nova.compute.manager [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Took 26.10 seconds to build instance. [ 1316.449567] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775396, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.626916] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.808s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.627509] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1316.631723] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.294s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1316.633407] env[62508]: INFO nova.compute.claims [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1316.651683] env[62508]: DEBUG oslo_vmware.api [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Task: {'id': task-1775398, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.243456} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.651955] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1316.657090] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1316.657090] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1316.657090] env[62508]: INFO nova.compute.manager [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 1316.657090] env[62508]: DEBUG oslo.service.loopingcall [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1316.657625] env[62508]: DEBUG nova.compute.manager [-] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1316.657721] env[62508]: DEBUG nova.network.neutron [-] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1316.683157] env[62508]: DEBUG nova.network.neutron [-] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1316.712987] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775399, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.878850] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8318b381-2452-4cdd-9068-9ebc09a854fa tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Lock "46a524e2-93b0-4726-812f-98e08b6ba0b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.093s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.952854] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775396, 'name': CreateVM_Task, 'duration_secs': 0.776459} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.952906] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1316.954353] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1316.954517] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1316.954894] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1316.957232] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b1074e4-9460-4b0d-ba2a-b96ef1bc1139 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.960134] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1316.960134] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5240ba0f-4da5-72bf-f4da-04e3b05fa60b" [ 1316.960134] env[62508]: _type = "Task" [ 1316.960134] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.969315] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5240ba0f-4da5-72bf-f4da-04e3b05fa60b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.125953] env[62508]: DEBUG nova.compute.manager [req-24739e22-2ace-4aed-9165-3530845ecc00 req-ff626180-3492-4e08-98fe-ab40eefec5c9 service nova] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Received event network-changed-a4972a5c-7644-49db-a0bb-51bc62415b49 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1317.126127] env[62508]: DEBUG nova.compute.manager [req-24739e22-2ace-4aed-9165-3530845ecc00 req-ff626180-3492-4e08-98fe-ab40eefec5c9 service nova] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Refreshing instance network info cache due to event network-changed-a4972a5c-7644-49db-a0bb-51bc62415b49. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1317.126353] env[62508]: DEBUG oslo_concurrency.lockutils [req-24739e22-2ace-4aed-9165-3530845ecc00 req-ff626180-3492-4e08-98fe-ab40eefec5c9 service nova] Acquiring lock "refresh_cache-575ea3dc-850d-4078-8678-41b3c40a4c27" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1317.130100] env[62508]: DEBUG oslo_concurrency.lockutils [req-24739e22-2ace-4aed-9165-3530845ecc00 req-ff626180-3492-4e08-98fe-ab40eefec5c9 service nova] Acquired lock "refresh_cache-575ea3dc-850d-4078-8678-41b3c40a4c27" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.130100] env[62508]: DEBUG nova.network.neutron [req-24739e22-2ace-4aed-9165-3530845ecc00 req-ff626180-3492-4e08-98fe-ab40eefec5c9 service nova] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Refreshing network info cache for port a4972a5c-7644-49db-a0bb-51bc62415b49 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1317.140970] env[62508]: DEBUG nova.compute.utils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1317.143200] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1317.143387] env[62508]: DEBUG nova.network.neutron [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1317.185696] env[62508]: DEBUG nova.network.neutron [-] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1317.211970] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775399, 'name': ReconfigVM_Task, 'duration_secs': 0.717688} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.212284] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Reconfigured VM instance instance-0000000c to attach disk [datastore1] e2d4c71b-1164-4c7d-9ffb-7f5489f92d32/e2d4c71b-1164-4c7d-9ffb-7f5489f92d32.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1317.214034] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3c929d0d-b2d1-4a54-a961-1eb709a6a8ba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.220094] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for the task: (returnval){ [ 1317.220094] env[62508]: value = "task-1775400" [ 1317.220094] env[62508]: _type = "Task" [ 1317.220094] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.228931] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775400, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.259305] env[62508]: DEBUG nova.policy [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '49543399d8054c18bb5e8b7e843c2e52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9030159a6b094d47ad55301d5cb6c5b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1317.336528] env[62508]: DEBUG oslo_concurrency.lockutils [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Acquiring lock "fa00f4fe-3bb2-4e17-be22-8a1fda502f65" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1317.340024] env[62508]: DEBUG oslo_concurrency.lockutils [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Lock "fa00f4fe-3bb2-4e17-be22-8a1fda502f65" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1317.340024] env[62508]: DEBUG oslo_concurrency.lockutils [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 
tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Acquiring lock "fa00f4fe-3bb2-4e17-be22-8a1fda502f65-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1317.340024] env[62508]: DEBUG oslo_concurrency.lockutils [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Lock "fa00f4fe-3bb2-4e17-be22-8a1fda502f65-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1317.340024] env[62508]: DEBUG oslo_concurrency.lockutils [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Lock "fa00f4fe-3bb2-4e17-be22-8a1fda502f65-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1317.341102] env[62508]: INFO nova.compute.manager [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Terminating instance [ 1317.343305] env[62508]: DEBUG oslo_concurrency.lockutils [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Acquiring lock "refresh_cache-fa00f4fe-3bb2-4e17-be22-8a1fda502f65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1317.343472] env[62508]: DEBUG oslo_concurrency.lockutils [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Acquired lock "refresh_cache-fa00f4fe-3bb2-4e17-be22-8a1fda502f65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.343650] env[62508]: DEBUG nova.network.neutron [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1317.381401] env[62508]: DEBUG nova.compute.manager [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1317.474052] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5240ba0f-4da5-72bf-f4da-04e3b05fa60b, 'name': SearchDatastore_Task, 'duration_secs': 0.011957} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.474052] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1317.474052] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1317.474052] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1317.474355] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.474355] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1317.474355] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d096fec-710b-4264-a8e8-478ef5f9f90f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.482439] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1317.482606] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1317.483453] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-698c31f2-cd7c-4290-a925-3aba2c9798ff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.489527] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1317.489527] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5243c768-2b96-64f7-5543-4fc25a20153e" [ 1317.489527] env[62508]: _type = "Task" [ 1317.489527] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.497966] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5243c768-2b96-64f7-5543-4fc25a20153e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.636596] env[62508]: DEBUG nova.network.neutron [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Successfully created port: 09f33fb7-0c57-46d2-b4ba-1ebe2b45daa7 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1317.648888] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1317.688983] env[62508]: INFO nova.compute.manager [-] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Took 1.03 seconds to deallocate network for instance. [ 1317.734120] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775400, 'name': Rename_Task, 'duration_secs': 0.162127} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.735044] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1317.735606] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3677bb20-9fde-4022-8da2-9ebf28271a43 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.746285] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for the task: (returnval){ [ 1317.746285] env[62508]: value = "task-1775401" [ 1317.746285] env[62508]: _type = "Task" [ 1317.746285] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.764174] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775401, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.878340] env[62508]: DEBUG nova.network.neutron [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1317.912795] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1318.006318] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5243c768-2b96-64f7-5543-4fc25a20153e, 'name': SearchDatastore_Task, 'duration_secs': 0.008741} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.006683] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3d452ae-74be-4f30-ac16-959cfef2114b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.010145] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Acquiring lock "46a524e2-93b0-4726-812f-98e08b6ba0b4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1318.010145] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Lock "46a524e2-93b0-4726-812f-98e08b6ba0b4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1318.010145] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Acquiring lock "46a524e2-93b0-4726-812f-98e08b6ba0b4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1318.011265] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Lock "46a524e2-93b0-4726-812f-98e08b6ba0b4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1318.011265] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Lock "46a524e2-93b0-4726-812f-98e08b6ba0b4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1318.013434] env[62508]: INFO nova.compute.manager [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Terminating instance [ 1318.016696] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1318.016696] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ae28c9-e3fb-7ad1-8dd2-02ca90f1b940" [ 1318.016696] env[62508]: _type = "Task" [ 1318.016696] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.019825] env[62508]: DEBUG nova.compute.manager [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1318.020092] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1318.021050] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dca334d4-9e28-4d16-9432-934a71344892 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.038397] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1318.039032] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ae28c9-e3fb-7ad1-8dd2-02ca90f1b940, 'name': SearchDatastore_Task, 'duration_secs': 0.009716} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.041672] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-86eaa40f-c21b-4809-bce2-06f54b8ef6d3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.043684] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1318.043983] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 575ea3dc-850d-4078-8678-41b3c40a4c27/575ea3dc-850d-4078-8678-41b3c40a4c27.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1318.044463] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-06f93461-ecbd-477c-b0fe-e8f8d25bc13d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.053355] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1318.053355] env[62508]: value = "task-1775403" [ 1318.053355] env[62508]: _type = "Task" [ 1318.053355] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.053866] env[62508]: DEBUG nova.network.neutron [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1318.057144] env[62508]: DEBUG oslo_vmware.api [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Waiting for the task: (returnval){ [ 1318.057144] env[62508]: value = "task-1775402" [ 1318.057144] env[62508]: _type = "Task" [ 1318.057144] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.084806] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775403, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.085078] env[62508]: DEBUG oslo_vmware.api [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': task-1775402, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.165560] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82112267-c435-4afb-bd00-14ed8dae9a09 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.173022] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d470b6d3-b3fb-41f7-a51d-b34a1f77ae68 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.205942] env[62508]: DEBUG oslo_concurrency.lockutils [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1318.208251] env[62508]: DEBUG nova.network.neutron [req-24739e22-2ace-4aed-9165-3530845ecc00 req-ff626180-3492-4e08-98fe-ab40eefec5c9 service nova] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Updated VIF entry in instance network info cache for port a4972a5c-7644-49db-a0bb-51bc62415b49. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1318.208603] env[62508]: DEBUG nova.network.neutron [req-24739e22-2ace-4aed-9165-3530845ecc00 req-ff626180-3492-4e08-98fe-ab40eefec5c9 service nova] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Updating instance_info_cache with network_info: [{"id": "a4972a5c-7644-49db-a0bb-51bc62415b49", "address": "fa:16:3e:d7:bd:c3", "network": {"id": "b9b83341-56f3-4400-b7ee-bceb6e21fc39", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1632679867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9030159a6b094d47ad55301d5cb6c5b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4972a5c-76", "ovs_interfaceid": "a4972a5c-7644-49db-a0bb-51bc62415b49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1318.210970] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e0ff50-e5c9-47ef-8c91-4fdf1546f2e2 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.220200] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aebfff7-1078-4785-ad00-31c1efd43bc8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.235806] env[62508]: DEBUG nova.compute.provider_tree [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1318.258306] env[62508]: DEBUG oslo_vmware.api [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775401, 'name': PowerOnVM_Task, 'duration_secs': 0.450925} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.258582] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1318.258782] env[62508]: INFO nova.compute.manager [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Took 9.32 seconds to spawn the instance on the hypervisor. [ 1318.258962] env[62508]: DEBUG nova.compute.manager [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1318.259789] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c27fb9-9570-48a7-8719-6b6d315cee46 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.557381] env[62508]: DEBUG oslo_concurrency.lockutils [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Releasing lock "refresh_cache-fa00f4fe-3bb2-4e17-be22-8a1fda502f65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1318.557826] env[62508]: DEBUG nova.compute.manager [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1318.558072] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1318.564291] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7373c6-9f07-4ed2-8a8d-057472cabf49 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.579363] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1318.579363] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775403, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.583035] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c146645-54e6-4eed-912e-430e15d8f59f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.596024] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Acquiring lock "a239d78f-085a-4e5c-924d-cf338298fa73" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1318.596024] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Lock "a239d78f-085a-4e5c-924d-cf338298fa73" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1318.596024] env[62508]: DEBUG oslo_vmware.api [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': task-1775402, 'name': PowerOffVM_Task, 'duration_secs': 0.183853} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.596024] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1318.596579] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1318.596764] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fff776f1-444d-4ef1-9c57-d44fbf414444 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.600776] env[62508]: DEBUG oslo_vmware.api [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Waiting for the task: (returnval){ [ 1318.600776] env[62508]: value = "task-1775404" [ 1318.600776] env[62508]: _type = "Task" [ 1318.600776] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.610798] env[62508]: DEBUG oslo_vmware.api [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': task-1775404, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.666343] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1318.666967] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1318.666967] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Deleting the datastore file [datastore1] 46a524e2-93b0-4726-812f-98e08b6ba0b4 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1318.667880] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1318.673223] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0eadffe1-eed5-4e08-96af-46ea0fe1f50e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.677795] env[62508]: DEBUG oslo_vmware.api [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Waiting for the task: (returnval){ [ 1318.677795] env[62508]: value = "task-1775406" [ 1318.677795] env[62508]: _type = "Task" [ 1318.677795] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.686833] env[62508]: DEBUG oslo_vmware.api [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': task-1775406, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.706995] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1318.707291] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1318.707453] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1318.707694] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1318.708112] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1318.708112] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1318.708213] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1318.708756] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1318.708756] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1318.708756] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1318.708959] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1318.713151] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-963a9fcc-3542-402e-bd8e-842249e7c372 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.721441] env[62508]: DEBUG oslo_concurrency.lockutils [req-24739e22-2ace-4aed-9165-3530845ecc00 req-ff626180-3492-4e08-98fe-ab40eefec5c9 service nova] Releasing lock "refresh_cache-575ea3dc-850d-4078-8678-41b3c40a4c27" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1318.721441] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-934c4524-8289-4582-a632-98b5b891d867 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.739581] env[62508]: DEBUG nova.scheduler.client.report [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 
512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1318.778330] env[62508]: INFO nova.compute.manager [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Took 28.37 seconds to build instance. [ 1319.068809] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775403, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.574957} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.069233] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 575ea3dc-850d-4078-8678-41b3c40a4c27/575ea3dc-850d-4078-8678-41b3c40a4c27.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1319.073389] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1319.073711] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b1776173-e920-47de-a6b5-d12945d21d7c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.080714] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1319.080714] env[62508]: value = "task-1775407" [ 1319.080714] env[62508]: _type = "Task" [ 1319.080714] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.089170] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775407, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.113553] env[62508]: DEBUG oslo_vmware.api [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': task-1775404, 'name': PowerOffVM_Task, 'duration_secs': 0.188775} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.114105] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1319.114377] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1319.114652] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a06392e1-1a73-4092-a2bf-25c695180be0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.144948] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1319.144948] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1319.144948] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Deleting the datastore file [datastore1] fa00f4fe-3bb2-4e17-be22-8a1fda502f65 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1319.145168] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f223afd3-6588-45d5-ab09-e198b80fc4df {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.155546] env[62508]: DEBUG oslo_vmware.api [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Waiting for the task: (returnval){ [ 1319.155546] env[62508]: value = "task-1775409" [ 1319.155546] env[62508]: _type = "Task" [ 1319.155546] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.165719] env[62508]: DEBUG oslo_vmware.api [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': task-1775409, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.187579] env[62508]: DEBUG oslo_vmware.api [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Task: {'id': task-1775406, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.340381} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.188455] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1319.188589] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1319.188892] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1319.191029] env[62508]: INFO nova.compute.manager [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1319.191029] env[62508]: DEBUG oslo.service.loopingcall [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1319.191029] env[62508]: DEBUG nova.compute.manager [-] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1319.191029] env[62508]: DEBUG nova.network.neutron [-] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1319.246285] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.614s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.248045] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1319.250494] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.715s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.252485] env[62508]: INFO nova.compute.claims [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1319.287855] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a63f8e52-db43-4f7f-8aea-78cdfb3bc8b5 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lock "e2d4c71b-1164-4c7d-9ffb-7f5489f92d32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.405s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.591645] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775407, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085973} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.591933] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1319.592896] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd30c28-49bb-4479-b2a7-4eac279c8b51 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.619962] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] 575ea3dc-850d-4078-8678-41b3c40a4c27/575ea3dc-850d-4078-8678-41b3c40a4c27.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1319.620496] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b372a4e3-b3e6-4653-8e54-1cdd97d338cf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.646814] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1319.646814] env[62508]: value = "task-1775410" [ 1319.646814] env[62508]: _type = "Task" [ 
1319.646814] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.656398] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775410, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.665824] env[62508]: DEBUG oslo_vmware.api [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Task: {'id': task-1775409, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.264187} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.665986] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1319.666285] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1319.666549] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1319.666809] env[62508]: INFO nova.compute.manager [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1319.667139] env[62508]: DEBUG oslo.service.loopingcall [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1319.667433] env[62508]: DEBUG nova.compute.manager [-] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1319.667600] env[62508]: DEBUG nova.network.neutron [-] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1319.712244] env[62508]: DEBUG nova.network.neutron [-] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1319.759199] env[62508]: DEBUG nova.compute.utils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1319.763265] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1319.763582] env[62508]: DEBUG nova.network.neutron [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1319.789451] env[62508]: DEBUG nova.compute.manager [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1319.901068] env[62508]: DEBUG nova.policy [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '49543399d8054c18bb5e8b7e843c2e52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9030159a6b094d47ad55301d5cb6c5b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1320.145305] env[62508]: DEBUG nova.network.neutron [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Successfully updated port: 09f33fb7-0c57-46d2-b4ba-1ebe2b45daa7 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1320.163845] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775410, 'name': ReconfigVM_Task, 'duration_secs': 0.264875} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.164241] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Reconfigured VM instance instance-0000000d to attach disk [datastore1] 575ea3dc-850d-4078-8678-41b3c40a4c27/575ea3dc-850d-4078-8678-41b3c40a4c27.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1320.164907] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-220b13f8-531b-4089-a877-8c67ab22dfc6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.172693] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1320.172693] env[62508]: value = "task-1775411" [ 1320.172693] env[62508]: _type = "Task" [ 1320.172693] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.182644] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775411, 'name': Rename_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.214968] env[62508]: DEBUG nova.network.neutron [-] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1320.262477] env[62508]: DEBUG nova.compute.manager [req-7f4498b9-71b9-40b4-a096-cbbe9961c849 req-108c065b-b39f-4d93-adbd-b43a648bf0c9 service nova] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Received event network-vif-plugged-09f33fb7-0c57-46d2-b4ba-1ebe2b45daa7 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1320.262477] env[62508]: DEBUG oslo_concurrency.lockutils [req-7f4498b9-71b9-40b4-a096-cbbe9961c849 req-108c065b-b39f-4d93-adbd-b43a648bf0c9 service nova] Acquiring lock "192995e7-82f5-41be-990d-d91b93f981e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.262477] env[62508]: DEBUG oslo_concurrency.lockutils [req-7f4498b9-71b9-40b4-a096-cbbe9961c849 req-108c065b-b39f-4d93-adbd-b43a648bf0c9 service nova] Lock "192995e7-82f5-41be-990d-d91b93f981e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.262477] env[62508]: DEBUG oslo_concurrency.lockutils [req-7f4498b9-71b9-40b4-a096-cbbe9961c849 req-108c065b-b39f-4d93-adbd-b43a648bf0c9 service nova] Lock "192995e7-82f5-41be-990d-d91b93f981e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1320.262477] env[62508]: DEBUG nova.compute.manager [req-7f4498b9-71b9-40b4-a096-cbbe9961c849 req-108c065b-b39f-4d93-adbd-b43a648bf0c9 service nova] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] No waiting events found dispatching network-vif-plugged-09f33fb7-0c57-46d2-b4ba-1ebe2b45daa7 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1320.262999] env[62508]: WARNING nova.compute.manager [req-7f4498b9-71b9-40b4-a096-cbbe9961c849 req-108c065b-b39f-4d93-adbd-b43a648bf0c9 service nova] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Received unexpected event network-vif-plugged-09f33fb7-0c57-46d2-b4ba-1ebe2b45daa7 for instance with vm_state building and task_state spawning. [ 1320.266945] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1320.329028] env[62508]: DEBUG oslo_concurrency.lockutils [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.528120] env[62508]: DEBUG nova.network.neutron [-] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1320.657791] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "refresh_cache-192995e7-82f5-41be-990d-d91b93f981e1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1320.657965] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquired lock "refresh_cache-192995e7-82f5-41be-990d-d91b93f981e1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1320.658162] env[62508]: DEBUG nova.network.neutron [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1320.664513] env[62508]: DEBUG nova.compute.manager [req-a6e8b13e-8229-459e-bb7a-df80bd4fff9a req-a8fe5fc8-1a08-4d7c-91fc-548f08c62898 service nova] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Received event network-vif-deleted-a5f76bdc-0abb-4e34-b1bc-7711f90a4c29 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1320.668595] env[62508]: DEBUG nova.network.neutron [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 
tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Successfully created port: 2fb8da4e-64be-4341-9f85-2c75e0c67dfb {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1320.686154] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775411, 'name': Rename_Task, 'duration_secs': 0.143111} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.690677] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1320.695404] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7d939013-a4ad-4155-a39d-f49e329684b6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.706755] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1320.706755] env[62508]: value = "task-1775412" [ 1320.706755] env[62508]: _type = "Task" [ 1320.706755] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.718907] env[62508]: INFO nova.compute.manager [-] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Took 1.05 seconds to deallocate network for instance. [ 1320.733296] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775412, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.787698] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fda292f-8689-4eaf-b0fe-343715f04917 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.797080] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6393de06-1d70-47d4-bd56-72cd955ef707 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.833958] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd84b47-607a-448e-bf99-f39a755e58a7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.843469] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb1ac00-19b9-4e9a-9e74-f3c93b3105ba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.864801] env[62508]: DEBUG nova.compute.provider_tree [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1321.031059] env[62508]: INFO nova.compute.manager [-] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Took 1.84 seconds to deallocate network for instance. [ 1321.218450] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775412, 'name': PowerOnVM_Task, 'duration_secs': 0.440044} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.218924] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1321.219025] env[62508]: INFO nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Took 8.58 seconds to spawn the instance on the hypervisor. 
[ 1321.220298] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1321.220298] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61b4802-064f-445d-b7d2-a259b1dd8808 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.227247] env[62508]: DEBUG oslo_concurrency.lockutils [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1321.282630] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1321.288547] env[62508]: DEBUG nova.network.neutron [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1321.310395] env[62508]: DEBUG oslo_concurrency.lockutils [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquiring lock "ffe54977-81c4-4842-9773-eed704a53ada" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1321.310644] env[62508]: DEBUG oslo_concurrency.lockutils [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lock "ffe54977-81c4-4842-9773-eed704a53ada" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1321.318581] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1321.319240] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1321.319240] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1321.319346] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1321.319711] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1321.319711] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1321.320108] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1321.320281] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1321.320565] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1321.320642] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1321.320781] env[62508]: DEBUG nova.virt.hardware [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1321.321997] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a08a75d9-4d04-4bd3-926d-14d56dc9576c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.331438] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2afbb34f-09f7-443a-9da4-118508ba64f5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.368612] env[62508]: DEBUG nova.scheduler.client.report [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1321.542332] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1321.706746] env[62508]: DEBUG nova.network.neutron [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Updating instance_info_cache with network_info: [{"id": "09f33fb7-0c57-46d2-b4ba-1ebe2b45daa7", "address": "fa:16:3e:92:46:df", "network": {"id": "b9b83341-56f3-4400-b7ee-bceb6e21fc39", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1632679867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9030159a6b094d47ad55301d5cb6c5b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09f33fb7-0c", "ovs_interfaceid": "09f33fb7-0c57-46d2-b4ba-1ebe2b45daa7", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1321.745677] env[62508]: INFO nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Took 30.89 seconds to build instance. [ 1321.875950] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.626s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1321.877352] env[62508]: DEBUG nova.compute.manager [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1321.881919] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.255s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1321.881919] env[62508]: INFO nova.compute.claims [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1322.211041] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Releasing lock "refresh_cache-192995e7-82f5-41be-990d-d91b93f981e1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1322.211041] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Instance network_info: |[{"id": "09f33fb7-0c57-46d2-b4ba-1ebe2b45daa7", "address": "fa:16:3e:92:46:df", "network": {"id": "b9b83341-56f3-4400-b7ee-bceb6e21fc39", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1632679867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9030159a6b094d47ad55301d5cb6c5b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09f33fb7-0c", "ovs_interfaceid": "09f33fb7-0c57-46d2-b4ba-1ebe2b45daa7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1322.211705] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:46:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ed4797-90ad-44cd-bbcb-e90b2a8400f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '09f33fb7-0c57-46d2-b4ba-1ebe2b45daa7', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1322.225519] env[62508]: DEBUG oslo.service.loopingcall [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1322.225856] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1322.226370] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-957b381c-d7d0-4f12-80a8-26ebd3d9667c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.250460] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "575ea3dc-850d-4078-8678-41b3c40a4c27" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.987s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1322.251285] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1322.251285] env[62508]: value = "task-1775413" [ 1322.251285] env[62508]: _type = "Task" [ 1322.251285] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.262170] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775413, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.392609] env[62508]: DEBUG nova.compute.utils [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1322.392609] env[62508]: DEBUG nova.compute.manager [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1322.392609] env[62508]: DEBUG nova.network.neutron [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1322.463769] env[62508]: DEBUG nova.policy [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '70e76262fb524f7cbcac156416ff645f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4516318f4ea84bf2bcc39bd70cdca54a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1322.757500] env[62508]: DEBUG nova.compute.manager [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1322.770178] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775413, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.910585] env[62508]: DEBUG nova.compute.manager [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1323.064619] env[62508]: DEBUG nova.compute.manager [req-8ab66dee-e454-43d3-8db0-01344b19827d req-ffda7c54-a6e9-45fb-9199-4c374f3efbb3 service nova] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Received event network-changed-09f33fb7-0c57-46d2-b4ba-1ebe2b45daa7 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1323.064807] env[62508]: DEBUG nova.compute.manager [req-8ab66dee-e454-43d3-8db0-01344b19827d req-ffda7c54-a6e9-45fb-9199-4c374f3efbb3 service nova] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Refreshing instance network info cache due to event network-changed-09f33fb7-0c57-46d2-b4ba-1ebe2b45daa7. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1323.065197] env[62508]: DEBUG oslo_concurrency.lockutils [req-8ab66dee-e454-43d3-8db0-01344b19827d req-ffda7c54-a6e9-45fb-9199-4c374f3efbb3 service nova] Acquiring lock "refresh_cache-192995e7-82f5-41be-990d-d91b93f981e1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1323.065497] env[62508]: DEBUG oslo_concurrency.lockutils [req-8ab66dee-e454-43d3-8db0-01344b19827d req-ffda7c54-a6e9-45fb-9199-4c374f3efbb3 service nova] Acquired lock "refresh_cache-192995e7-82f5-41be-990d-d91b93f981e1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.065666] env[62508]: DEBUG nova.network.neutron [req-8ab66dee-e454-43d3-8db0-01344b19827d req-ffda7c54-a6e9-45fb-9199-4c374f3efbb3 service nova] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Refreshing network info cache for port 09f33fb7-0c57-46d2-b4ba-1ebe2b45daa7 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1323.151878] env[62508]: DEBUG nova.network.neutron [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Successfully updated port: 2fb8da4e-64be-4341-9f85-2c75e0c67dfb {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1323.168305] env[62508]: DEBUG nova.network.neutron [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Successfully created port: 87421d0c-fb71-4543-be75-596ccb1584a9 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1323.262444] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775413, 'name': CreateVM_Task, 'duration_secs': 0.689826} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.265812] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1323.273353] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1323.273967] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.273967] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1323.280069] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e83d8e21-2599-450b-8fd0-de7bcb260a2a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.285661] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1323.285661] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ed5855-d364-d500-e853-a2f9f1b2a759" [ 1323.285661] env[62508]: _type = "Task" [ 1323.285661] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.293497] env[62508]: DEBUG oslo_concurrency.lockutils [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1323.299501] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ed5855-d364-d500-e853-a2f9f1b2a759, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.480701] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d81786-420f-4205-9231-2acbaf091909 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.489202] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed5c1b52-e747-40e4-8c7f-030ada67ba10 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.523361] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6bd5d69-84f3-4268-9dda-95a2227f3d67 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.531877] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef1681d-1a79-4453-aa4e-c0c2c8498ea3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.547015] env[62508]: DEBUG nova.compute.provider_tree [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1323.657425] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "refresh_cache-03552483-a365-4d25-94bc-ea9b38ee6cd6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1323.657709] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquired lock "refresh_cache-03552483-a365-4d25-94bc-ea9b38ee6cd6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.657880] env[62508]: DEBUG nova.network.neutron [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1323.763860] env[62508]: DEBUG nova.compute.manager [req-8c1d9fe4-ffd7-45e9-be28-776e7407e370 req-74d6468f-4358-46bc-a500-280ad8f5e4a4 service nova] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Received event network-vif-plugged-2fb8da4e-64be-4341-9f85-2c75e0c67dfb {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1323.764054] env[62508]: DEBUG oslo_concurrency.lockutils [req-8c1d9fe4-ffd7-45e9-be28-776e7407e370 req-74d6468f-4358-46bc-a500-280ad8f5e4a4 service nova] Acquiring lock "03552483-a365-4d25-94bc-ea9b38ee6cd6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1323.764259] env[62508]: DEBUG 
oslo_concurrency.lockutils [req-8c1d9fe4-ffd7-45e9-be28-776e7407e370 req-74d6468f-4358-46bc-a500-280ad8f5e4a4 service nova] Lock "03552483-a365-4d25-94bc-ea9b38ee6cd6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1323.764417] env[62508]: DEBUG oslo_concurrency.lockutils [req-8c1d9fe4-ffd7-45e9-be28-776e7407e370 req-74d6468f-4358-46bc-a500-280ad8f5e4a4 service nova] Lock "03552483-a365-4d25-94bc-ea9b38ee6cd6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1323.764585] env[62508]: DEBUG nova.compute.manager [req-8c1d9fe4-ffd7-45e9-be28-776e7407e370 req-74d6468f-4358-46bc-a500-280ad8f5e4a4 service nova] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] No waiting events found dispatching network-vif-plugged-2fb8da4e-64be-4341-9f85-2c75e0c67dfb {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1323.764733] env[62508]: WARNING nova.compute.manager [req-8c1d9fe4-ffd7-45e9-be28-776e7407e370 req-74d6468f-4358-46bc-a500-280ad8f5e4a4 service nova] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Received unexpected event network-vif-plugged-2fb8da4e-64be-4341-9f85-2c75e0c67dfb for instance with vm_state building and task_state spawning. [ 1323.799401] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ed5855-d364-d500-e853-a2f9f1b2a759, 'name': SearchDatastore_Task, 'duration_secs': 0.022656} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.799716] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1323.799999] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1323.800303] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1323.800451] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.800982] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1323.801644] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-315878af-ad32-42e9-a9c3-ff1ce39f6964 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.810904] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1323.810904] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1323.812683] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84cd1835-282c-494a-8564-91b2eb9a69ce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.818759] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1323.818759] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5260cb8c-8d8c-b9e0-ea69-3f2bbebbbc81" [ 1323.818759] env[62508]: _type = "Task" [ 1323.818759] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.826265] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5260cb8c-8d8c-b9e0-ea69-3f2bbebbbc81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.926780] env[62508]: DEBUG nova.compute.manager [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1323.961703] env[62508]: DEBUG nova.virt.hardware [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1323.961943] env[62508]: DEBUG nova.virt.hardware [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1323.962115] env[62508]: DEBUG nova.virt.hardware [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1323.962307] env[62508]: DEBUG nova.virt.hardware [None 
req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1323.962455] env[62508]: DEBUG nova.virt.hardware [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1323.962598] env[62508]: DEBUG nova.virt.hardware [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1323.962796] env[62508]: DEBUG nova.virt.hardware [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1323.962954] env[62508]: DEBUG nova.virt.hardware [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1323.963133] env[62508]: DEBUG nova.virt.hardware [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1323.963515] env[62508]: DEBUG nova.virt.hardware [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1323.963515] env[62508]: DEBUG nova.virt.hardware [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1323.964357] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d2b045-1a72-41f6-8227-b031c527e5b8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.973172] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45642d79-697c-4438-969d-f2032654068b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.049252] env[62508]: DEBUG nova.scheduler.client.report [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Inventory has not 
changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1324.073859] env[62508]: DEBUG nova.network.neutron [req-8ab66dee-e454-43d3-8db0-01344b19827d req-ffda7c54-a6e9-45fb-9199-4c374f3efbb3 service nova] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Updated VIF entry in instance network info cache for port 09f33fb7-0c57-46d2-b4ba-1ebe2b45daa7. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1324.074264] env[62508]: DEBUG nova.network.neutron [req-8ab66dee-e454-43d3-8db0-01344b19827d req-ffda7c54-a6e9-45fb-9199-4c374f3efbb3 service nova] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Updating instance_info_cache with network_info: [{"id": "09f33fb7-0c57-46d2-b4ba-1ebe2b45daa7", "address": "fa:16:3e:92:46:df", "network": {"id": "b9b83341-56f3-4400-b7ee-bceb6e21fc39", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1632679867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9030159a6b094d47ad55301d5cb6c5b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09f33fb7-0c", "ovs_interfaceid": "09f33fb7-0c57-46d2-b4ba-1ebe2b45daa7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1324.230696] env[62508]: DEBUG nova.network.neutron [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1324.331276] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5260cb8c-8d8c-b9e0-ea69-3f2bbebbbc81, 'name': SearchDatastore_Task, 'duration_secs': 0.010278} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.338393] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed2cd6b4-22eb-4886-8842-88e2c8324252 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.346926] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1324.346926] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52734b9f-4420-f0ef-954f-873df779f7e4" [ 1324.346926] env[62508]: _type = "Task" [ 1324.346926] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.362464] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52734b9f-4420-f0ef-954f-873df779f7e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.556516] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.676s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1324.557027] env[62508]: DEBUG nova.compute.manager [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1324.562463] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 22.368s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1324.562799] env[62508]: DEBUG nova.objects.instance [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62508) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1324.578067] env[62508]: DEBUG oslo_concurrency.lockutils [req-8ab66dee-e454-43d3-8db0-01344b19827d req-ffda7c54-a6e9-45fb-9199-4c374f3efbb3 service nova] Releasing lock "refresh_cache-192995e7-82f5-41be-990d-d91b93f981e1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1324.632982] env[62508]: DEBUG nova.network.neutron [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Updating instance_info_cache with network_info: [{"id": "2fb8da4e-64be-4341-9f85-2c75e0c67dfb", "address": "fa:16:3e:01:06:ef", "network": {"id": "b9b83341-56f3-4400-b7ee-bceb6e21fc39", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1632679867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9030159a6b094d47ad55301d5cb6c5b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2fb8da4e-64", "ovs_interfaceid": "2fb8da4e-64be-4341-9f85-2c75e0c67dfb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1324.857353] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52734b9f-4420-f0ef-954f-873df779f7e4, 'name': SearchDatastore_Task, 'duration_secs': 0.03918} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.857772] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1324.858192] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 192995e7-82f5-41be-990d-d91b93f981e1/192995e7-82f5-41be-990d-d91b93f981e1.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1324.858626] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-19916122-ba4b-4317-b0ea-a1683725003b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.864946] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1324.864946] env[62508]: value = "task-1775414" [ 1324.864946] env[62508]: _type = "Task" [ 1324.864946] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.878676] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775414, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.067343] env[62508]: DEBUG nova.compute.utils [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1325.078614] env[62508]: DEBUG nova.compute.manager [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1325.078873] env[62508]: DEBUG nova.network.neutron [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1325.083709] env[62508]: DEBUG nova.network.neutron [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Successfully updated port: 87421d0c-fb71-4543-be75-596ccb1584a9 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1325.117856] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Acquiring lock "18ee140a-97bd-439a-8027-0dd0a1f0a6e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1325.117856] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Lock "18ee140a-97bd-439a-8027-0dd0a1f0a6e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1325.135126] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Releasing lock "refresh_cache-03552483-a365-4d25-94bc-ea9b38ee6cd6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1325.135777] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Instance network_info: |[{"id": "2fb8da4e-64be-4341-9f85-2c75e0c67dfb", "address": "fa:16:3e:01:06:ef", "network": {"id": "b9b83341-56f3-4400-b7ee-bceb6e21fc39", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1632679867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9030159a6b094d47ad55301d5cb6c5b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2fb8da4e-64", "ovs_interfaceid": "2fb8da4e-64be-4341-9f85-2c75e0c67dfb", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1325.136205] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:06:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ed4797-90ad-44cd-bbcb-e90b2a8400f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2fb8da4e-64be-4341-9f85-2c75e0c67dfb', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1325.146623] env[62508]: DEBUG oslo.service.loopingcall [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1325.146623] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1325.146900] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac49ceb7-fc2e-4d75-977c-40575e7664e9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.168889] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1325.168889] env[62508]: value = "task-1775415" [ 1325.168889] env[62508]: _type = "Task" [ 1325.168889] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.176862] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775415, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.301107] env[62508]: DEBUG nova.policy [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4e5f76e5d1e42838eda29c2c110c17f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '51842f3e9f83452789923afbafd40bc4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1325.386954] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775414, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.581020] env[62508]: DEBUG nova.compute.manager [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1325.588935] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b2c554f3-2eeb-4188-825e-54f6b066da7f tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.025s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1325.590652] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Acquiring lock "refresh_cache-7d23d8f0-d7a9-4236-ad28-208e77b72138" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1325.590917] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Acquired lock "refresh_cache-7d23d8f0-d7a9-4236-ad28-208e77b72138" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1325.591204] env[62508]: DEBUG nova.network.neutron [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1325.596019] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.791s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1325.596019] env[62508]: INFO nova.compute.claims [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1325.680795] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775415, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.881944] env[62508]: DEBUG nova.compute.manager [req-a2a4874e-3c34-432a-89ce-b7ac5714f0a5 req-0949228e-d485-4c04-966e-975f38a00cab service nova] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Received event network-vif-plugged-87421d0c-fb71-4543-be75-596ccb1584a9 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1325.881944] env[62508]: DEBUG oslo_concurrency.lockutils [req-a2a4874e-3c34-432a-89ce-b7ac5714f0a5 req-0949228e-d485-4c04-966e-975f38a00cab service nova] Acquiring lock "7d23d8f0-d7a9-4236-ad28-208e77b72138-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1325.881944] env[62508]: DEBUG oslo_concurrency.lockutils [req-a2a4874e-3c34-432a-89ce-b7ac5714f0a5 req-0949228e-d485-4c04-966e-975f38a00cab service nova] Lock "7d23d8f0-d7a9-4236-ad28-208e77b72138-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1325.881944] env[62508]: DEBUG oslo_concurrency.lockutils [req-a2a4874e-3c34-432a-89ce-b7ac5714f0a5 req-0949228e-d485-4c04-966e-975f38a00cab service nova] Lock "7d23d8f0-d7a9-4236-ad28-208e77b72138-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1325.881944] env[62508]: DEBUG nova.compute.manager [req-a2a4874e-3c34-432a-89ce-b7ac5714f0a5 req-0949228e-d485-4c04-966e-975f38a00cab service nova] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] No waiting events found dispatching network-vif-plugged-87421d0c-fb71-4543-be75-596ccb1584a9 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1325.882239] env[62508]: WARNING nova.compute.manager [req-a2a4874e-3c34-432a-89ce-b7ac5714f0a5 req-0949228e-d485-4c04-966e-975f38a00cab service nova] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Received unexpected event network-vif-plugged-87421d0c-fb71-4543-be75-596ccb1584a9 for instance with vm_state building and task_state spawning. [ 1325.886897] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775414, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.669405} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.888159] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 192995e7-82f5-41be-990d-d91b93f981e1/192995e7-82f5-41be-990d-d91b93f981e1.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1325.888159] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1325.888159] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ffdaef9-b912-41b7-ab3f-706ffe6d5906 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.899874] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1325.899874] env[62508]: value = "task-1775416" [ 1325.899874] env[62508]: _type = "Task" [ 1325.899874] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.910156] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775416, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.939440] env[62508]: DEBUG nova.network.neutron [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Successfully created port: b7ef30a6-939d-4546-9597-db2b4cc755ae {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1326.176496] env[62508]: DEBUG nova.network.neutron [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1326.186112] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775415, 'name': CreateVM_Task, 'duration_secs': 0.589893} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.186112] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1326.186112] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1326.186112] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.186112] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1326.186388] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3399659-2273-4c04-8d1f-66f3c75233fe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.194917] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1326.194917] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522f84aa-c037-ab92-d7be-0ca5c9c90c80" [ 1326.194917] env[62508]: _type = "Task" [ 1326.194917] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.205093] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522f84aa-c037-ab92-d7be-0ca5c9c90c80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.414332] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775416, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06693} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.414647] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1326.417255] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6f9478-7223-4234-8cc8-54675b88c60d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.440782] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] 192995e7-82f5-41be-990d-d91b93f981e1/192995e7-82f5-41be-990d-d91b93f981e1.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1326.441587] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-acdc7f5d-dca2-4640-9653-a859b658518c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.463651] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1326.463651] env[62508]: value = "task-1775417" [ 1326.463651] env[62508]: _type = "Task" [ 1326.463651] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.474157] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775417, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.503506] env[62508]: DEBUG nova.network.neutron [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Updating instance_info_cache with network_info: [{"id": "87421d0c-fb71-4543-be75-596ccb1584a9", "address": "fa:16:3e:0c:7f:32", "network": {"id": "5e2ac7df-b6ed-4cab-8db2-6da4e3cc5fc4", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-966305863-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4516318f4ea84bf2bcc39bd70cdca54a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8d3908ba-a3dc-4f88-988b-f997cf7257e2", "external-id": "nsx-vlan-transportzone-116", "segmentation_id": 116, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87421d0c-fb", "ovs_interfaceid": "87421d0c-fb71-4543-be75-596ccb1584a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1326.569356] env[62508]: DEBUG nova.compute.manager [req-75599f70-78c6-40df-b95a-94981760ccbd req-329d7b66-741b-4b9e-b0ff-244ac761a08e service nova] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Received event network-changed-2fb8da4e-64be-4341-9f85-2c75e0c67dfb {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1326.569550] env[62508]: DEBUG nova.compute.manager [req-75599f70-78c6-40df-b95a-94981760ccbd req-329d7b66-741b-4b9e-b0ff-244ac761a08e service nova] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Refreshing instance network info cache due to event network-changed-2fb8da4e-64be-4341-9f85-2c75e0c67dfb. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1326.569835] env[62508]: DEBUG oslo_concurrency.lockutils [req-75599f70-78c6-40df-b95a-94981760ccbd req-329d7b66-741b-4b9e-b0ff-244ac761a08e service nova] Acquiring lock "refresh_cache-03552483-a365-4d25-94bc-ea9b38ee6cd6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1326.570197] env[62508]: DEBUG oslo_concurrency.lockutils [req-75599f70-78c6-40df-b95a-94981760ccbd req-329d7b66-741b-4b9e-b0ff-244ac761a08e service nova] Acquired lock "refresh_cache-03552483-a365-4d25-94bc-ea9b38ee6cd6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.570418] env[62508]: DEBUG nova.network.neutron [req-75599f70-78c6-40df-b95a-94981760ccbd req-329d7b66-741b-4b9e-b0ff-244ac761a08e service nova] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Refreshing network info cache for port 2fb8da4e-64be-4341-9f85-2c75e0c67dfb {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1326.594771] env[62508]: DEBUG nova.compute.manager [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1326.628272] env[62508]: DEBUG nova.virt.hardware [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:09:01Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='55787061',id=27,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-581998823',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1326.628542] env[62508]: DEBUG nova.virt.hardware [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1326.628717] env[62508]: DEBUG nova.virt.hardware [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1326.628899] env[62508]: DEBUG nova.virt.hardware [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Flavor pref 0:0:0 
{{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1326.629533] env[62508]: DEBUG nova.virt.hardware [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1326.629533] env[62508]: DEBUG nova.virt.hardware [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1326.629533] env[62508]: DEBUG nova.virt.hardware [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1326.629736] env[62508]: DEBUG nova.virt.hardware [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1326.629797] env[62508]: DEBUG nova.virt.hardware [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1326.629931] env[62508]: DEBUG nova.virt.hardware [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1326.630166] env[62508]: DEBUG nova.virt.hardware [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1326.631108] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fde98a3-f391-495a-a37c-7198c7ead854 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.644125] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e48a6b-6e06-4c5d-878b-bdb495a97b0a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.707821] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522f84aa-c037-ab92-d7be-0ca5c9c90c80, 'name': 
SearchDatastore_Task, 'duration_secs': 0.017178} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.708193] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1326.708442] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1326.708675] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1326.708823] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.709094] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1326.709548] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-da9d86ba-e592-4f5d-ae4a-2797d6dc96f9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.722120] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1326.722120] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1326.726263] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e049869-0cf9-424f-bc66-9c8abc21cb18 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.731034] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1326.731034] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5289bfab-3c0c-cb3e-e62d-27a7f509eff7" [ 1326.731034] env[62508]: _type = "Task" [ 1326.731034] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.743364] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5289bfab-3c0c-cb3e-e62d-27a7f509eff7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.978193] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775417, 'name': ReconfigVM_Task, 'duration_secs': 0.289805} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.981741] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Reconfigured VM instance instance-0000000e to attach disk [datastore1] 192995e7-82f5-41be-990d-d91b93f981e1/192995e7-82f5-41be-990d-d91b93f981e1.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1326.981943] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-65991a44-b89e-4274-93e7-da733f873866 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.989365] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1326.989365] env[62508]: value = "task-1775418" [ 1326.989365] env[62508]: _type = "Task" [ 1326.989365] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.001243] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775418, 'name': Rename_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.004616] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Releasing lock "refresh_cache-7d23d8f0-d7a9-4236-ad28-208e77b72138" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.004916] env[62508]: DEBUG nova.compute.manager [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Instance network_info: |[{"id": "87421d0c-fb71-4543-be75-596ccb1584a9", "address": "fa:16:3e:0c:7f:32", "network": {"id": "5e2ac7df-b6ed-4cab-8db2-6da4e3cc5fc4", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-966305863-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4516318f4ea84bf2bcc39bd70cdca54a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8d3908ba-a3dc-4f88-988b-f997cf7257e2", "external-id": "nsx-vlan-transportzone-116", "segmentation_id": 116, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87421d0c-fb", "ovs_interfaceid": "87421d0c-fb71-4543-be75-596ccb1584a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1327.005332] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:7f:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8d3908ba-a3dc-4f88-988b-f997cf7257e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87421d0c-fb71-4543-be75-596ccb1584a9', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1327.013546] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Creating folder: Project (4516318f4ea84bf2bcc39bd70cdca54a). Parent ref: group-v368536. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1327.016625] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f7a8035-b257-4fd2-9e0f-6eb57e53a898 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.026050] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Created folder: Project (4516318f4ea84bf2bcc39bd70cdca54a) in parent group-v368536. [ 1327.026504] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Creating folder: Instances. Parent ref: group-v368580. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1327.026798] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ae44919-c070-44bd-95e9-b3f3b71a0088 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.035478] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Created folder: Instances in parent group-v368580. [ 1327.035773] env[62508]: DEBUG oslo.service.loopingcall [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1327.035977] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1327.036201] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-afdcb96b-78c7-4e3c-989c-bdcdcf58cd8d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.061437] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1327.061437] env[62508]: value = "task-1775421" [ 1327.061437] env[62508]: _type = "Task" [ 1327.061437] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.070809] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775421, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.155406] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2171d69-9b55-4320-be4b-23e79c59352d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.163786] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89aea285-9221-4d72-b025-c9436a775e8b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.196195] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-863e1e93-5403-49dc-b1a8-9a6292dda523 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.202969] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a2b3882-aef0-4318-8532-91857246d7c0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.219948] env[62508]: DEBUG nova.compute.provider_tree [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1327.242692] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5289bfab-3c0c-cb3e-e62d-27a7f509eff7, 'name': SearchDatastore_Task, 'duration_secs': 0.008689} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.243530] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c316d442-f59d-420e-8456-1009908d2171 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.249350] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1327.249350] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527f7909-e40d-3010-4729-7fa16654e4aa" [ 1327.249350] env[62508]: _type = "Task" [ 1327.249350] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.257756] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527f7909-e40d-3010-4729-7fa16654e4aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.404216] env[62508]: DEBUG nova.network.neutron [req-75599f70-78c6-40df-b95a-94981760ccbd req-329d7b66-741b-4b9e-b0ff-244ac761a08e service nova] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Updated VIF entry in instance network info cache for port 2fb8da4e-64be-4341-9f85-2c75e0c67dfb. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1327.404597] env[62508]: DEBUG nova.network.neutron [req-75599f70-78c6-40df-b95a-94981760ccbd req-329d7b66-741b-4b9e-b0ff-244ac761a08e service nova] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Updating instance_info_cache with network_info: [{"id": "2fb8da4e-64be-4341-9f85-2c75e0c67dfb", "address": "fa:16:3e:01:06:ef", "network": {"id": "b9b83341-56f3-4400-b7ee-bceb6e21fc39", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1632679867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9030159a6b094d47ad55301d5cb6c5b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2fb8da4e-64", "ovs_interfaceid": "2fb8da4e-64be-4341-9f85-2c75e0c67dfb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.500116] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775418, 'name': Rename_Task, 'duration_secs': 0.230845} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.500416] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1327.500685] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-736818c6-1714-44a5-8e99-e94561bbdb7c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.507842] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1327.507842] env[62508]: value = "task-1775422" [ 1327.507842] env[62508]: _type = "Task" [ 1327.507842] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.516783] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775422, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.573964] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775421, 'name': CreateVM_Task, 'duration_secs': 0.350209} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.573964] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1327.574655] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1327.574780] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.575124] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1327.575384] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec604724-3682-4fca-99f3-c44ed11b7d0e {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.580468] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Waiting for the task: (returnval){ [ 1327.580468] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520a025b-e8ff-7643-9f9f-b8cfda84b873" [ 1327.580468] env[62508]: _type = "Task" [ 1327.580468] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.588681] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520a025b-e8ff-7643-9f9f-b8cfda84b873, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.714490] env[62508]: DEBUG nova.network.neutron [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Successfully updated port: b7ef30a6-939d-4546-9597-db2b4cc755ae {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1327.744801] env[62508]: ERROR nova.scheduler.client.report [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [req-1333bfc8-9386-4278-a95e-3e443a464db8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1333bfc8-9386-4278-a95e-3e443a464db8"}]} [ 1327.758997] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527f7909-e40d-3010-4729-7fa16654e4aa, 'name': SearchDatastore_Task, 'duration_secs': 0.011222} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.760020] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.760453] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 03552483-a365-4d25-94bc-ea9b38ee6cd6/03552483-a365-4d25-94bc-ea9b38ee6cd6.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1327.760650] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c9e34c05-b4ca-49cb-af30-6d159a335304 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.766762] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1327.766762] env[62508]: value = "task-1775423" [ 1327.766762] env[62508]: _type = "Task" [ 1327.766762] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.771039] env[62508]: DEBUG nova.scheduler.client.report [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1327.777998] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775423, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.786031] env[62508]: DEBUG nova.scheduler.client.report [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1327.786476] env[62508]: DEBUG nova.compute.provider_tree [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1327.807296] env[62508]: DEBUG nova.scheduler.client.report [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1327.826649] env[62508]: DEBUG nova.scheduler.client.report [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1327.907931] env[62508]: DEBUG oslo_concurrency.lockutils [req-75599f70-78c6-40df-b95a-94981760ccbd req-329d7b66-741b-4b9e-b0ff-244ac761a08e service nova] Releasing lock "refresh_cache-03552483-a365-4d25-94bc-ea9b38ee6cd6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1328.018981] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775422, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.096426] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520a025b-e8ff-7643-9f9f-b8cfda84b873, 'name': SearchDatastore_Task, 'duration_secs': 0.011392} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.096763] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1328.097065] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1328.097364] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1328.097578] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.097768] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1328.098116] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ea4b1366-a186-44ae-a1e6-bf5413c71faf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.115635] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1328.116207] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1328.116831] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80ddc2ff-1530-4bc0-874e-f112bc80611d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.125906] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Waiting for the task: (returnval){ [ 1328.125906] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5281aa60-b155-5464-a96f-f438aea90a76" [ 1328.125906] env[62508]: _type = "Task" [ 1328.125906] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.137960] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5281aa60-b155-5464-a96f-f438aea90a76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.216816] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquiring lock "refresh_cache-b911f25d-711b-411e-bb2d-2e59386ff2ea" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1328.217040] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquired lock "refresh_cache-b911f25d-711b-411e-bb2d-2e59386ff2ea" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.217207] env[62508]: DEBUG nova.network.neutron [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1328.280264] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775423, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.313667] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c9d679-55ac-4252-a6a0-22ad7b628903 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.322875] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93d0ec0-0b4c-4cf9-b883-9959b1823650 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.367145] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7088781-328b-4cb0-a25c-bd530b7f7399 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.376117] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-955e54c7-8129-47c0-aec8-150ff8828d99 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.390401] env[62508]: DEBUG nova.compute.provider_tree [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1328.515580] env[62508]: DEBUG nova.compute.manager [req-56eda7e2-f622-473c-99a6-2b5a99dc0f3a req-3dde777b-8d4a-4f4f-83e6-d9caee3e11f7 service nova] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Received event network-changed-87421d0c-fb71-4543-be75-596ccb1584a9 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1328.515882] env[62508]: DEBUG nova.compute.manager [req-56eda7e2-f622-473c-99a6-2b5a99dc0f3a req-3dde777b-8d4a-4f4f-83e6-d9caee3e11f7 service nova] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Refreshing instance network info cache due to event network-changed-87421d0c-fb71-4543-be75-596ccb1584a9. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1328.516015] env[62508]: DEBUG oslo_concurrency.lockutils [req-56eda7e2-f622-473c-99a6-2b5a99dc0f3a req-3dde777b-8d4a-4f4f-83e6-d9caee3e11f7 service nova] Acquiring lock "refresh_cache-7d23d8f0-d7a9-4236-ad28-208e77b72138" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1328.516166] env[62508]: DEBUG oslo_concurrency.lockutils [req-56eda7e2-f622-473c-99a6-2b5a99dc0f3a req-3dde777b-8d4a-4f4f-83e6-d9caee3e11f7 service nova] Acquired lock "refresh_cache-7d23d8f0-d7a9-4236-ad28-208e77b72138" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.516336] env[62508]: DEBUG nova.network.neutron [req-56eda7e2-f622-473c-99a6-2b5a99dc0f3a req-3dde777b-8d4a-4f4f-83e6-d9caee3e11f7 service nova] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Refreshing network info cache for port 87421d0c-fb71-4543-be75-596ccb1584a9 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1328.530747] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775422, 'name': PowerOnVM_Task, 'duration_secs': 0.554358} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.530913] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1328.531045] env[62508]: INFO nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Took 9.86 seconds to spawn the instance on the hypervisor. [ 1328.531337] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1328.533009] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db1cff3-8512-4a2e-8f59-2c2450b11c4b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.639242] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5281aa60-b155-5464-a96f-f438aea90a76, 'name': SearchDatastore_Task, 'duration_secs': 0.066519} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.640178] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc3bde30-1f75-4795-877e-3a180a99a785 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.645837] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Waiting for the task: (returnval){ [ 1328.645837] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523ab5ec-a2fa-6777-a05f-973405dfc22b" [ 1328.645837] env[62508]: _type = "Task" [ 1328.645837] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.659038] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523ab5ec-a2fa-6777-a05f-973405dfc22b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.769642] env[62508]: DEBUG nova.network.neutron [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1328.784738] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775423, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.611875} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.784873] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 03552483-a365-4d25-94bc-ea9b38ee6cd6/03552483-a365-4d25-94bc-ea9b38ee6cd6.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1328.785106] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1328.785366] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-27d77e10-9d53-4c36-9a99-077208198927 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.791998] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1328.791998] env[62508]: value = "task-1775424" [ 1328.791998] env[62508]: _type = "Task" [ 1328.791998] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.806021] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775424, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.914120] env[62508]: ERROR nova.scheduler.client.report [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [req-61f4944a-1fd9-48ce-aea1-2cfe5bf9d071] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-61f4944a-1fd9-48ce-aea1-2cfe5bf9d071"}]} [ 1328.930098] env[62508]: DEBUG nova.scheduler.client.report [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1328.949020] env[62508]: DEBUG nova.scheduler.client.report [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1328.949272] env[62508]: DEBUG nova.compute.provider_tree [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1328.961441] env[62508]: DEBUG nova.scheduler.client.report [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1328.979900] env[62508]: DEBUG nova.scheduler.client.report [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1329.053676] env[62508]: INFO nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Took 32.73 seconds to build 
instance. [ 1329.078299] env[62508]: DEBUG nova.network.neutron [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Updating instance_info_cache with network_info: [{"id": "b7ef30a6-939d-4546-9597-db2b4cc755ae", "address": "fa:16:3e:d4:1b:46", "network": {"id": "2e10157c-8592-41c4-8540-9696923dc9ca", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1194878546-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "51842f3e9f83452789923afbafd40bc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f52a458-d157-48a3-b4e2-b8cc0779afe2", "external-id": "nsx-vlan-transportzone-403", "segmentation_id": 403, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7ef30a6-93", "ovs_interfaceid": "b7ef30a6-939d-4546-9597-db2b4cc755ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1329.159671] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523ab5ec-a2fa-6777-a05f-973405dfc22b, 'name': SearchDatastore_Task, 'duration_secs': 0.051801} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.159947] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.160273] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 7d23d8f0-d7a9-4236-ad28-208e77b72138/7d23d8f0-d7a9-4236-ad28-208e77b72138.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1329.160542] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d57c0e9-f3af-4705-ac7c-f82d4d586ec2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.169929] env[62508]: DEBUG nova.compute.manager [req-5042d1d5-8a0b-423e-bf0e-9ccf84b7b858 req-ab5cc928-ea61-42e0-bfdb-9669f9735d11 service nova] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Received event network-vif-plugged-b7ef30a6-939d-4546-9597-db2b4cc755ae {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1329.170322] env[62508]: DEBUG oslo_concurrency.lockutils [req-5042d1d5-8a0b-423e-bf0e-9ccf84b7b858 req-ab5cc928-ea61-42e0-bfdb-9669f9735d11 service nova] Acquiring lock "b911f25d-711b-411e-bb2d-2e59386ff2ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1329.170455] env[62508]: DEBUG oslo_concurrency.lockutils [req-5042d1d5-8a0b-423e-bf0e-9ccf84b7b858 req-ab5cc928-ea61-42e0-bfdb-9669f9735d11 service nova] Lock "b911f25d-711b-411e-bb2d-2e59386ff2ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1329.170584] env[62508]: DEBUG oslo_concurrency.lockutils [req-5042d1d5-8a0b-423e-bf0e-9ccf84b7b858 req-ab5cc928-ea61-42e0-bfdb-9669f9735d11 service nova] Lock "b911f25d-711b-411e-bb2d-2e59386ff2ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1329.170793] env[62508]: DEBUG nova.compute.manager [req-5042d1d5-8a0b-423e-bf0e-9ccf84b7b858 req-ab5cc928-ea61-42e0-bfdb-9669f9735d11 service nova] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] No waiting events found dispatching network-vif-plugged-b7ef30a6-939d-4546-9597-db2b4cc755ae {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1329.170955] env[62508]: WARNING nova.compute.manager [req-5042d1d5-8a0b-423e-bf0e-9ccf84b7b858 req-ab5cc928-ea61-42e0-bfdb-9669f9735d11 service nova] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Received unexpected event 
network-vif-plugged-b7ef30a6-939d-4546-9597-db2b4cc755ae for instance with vm_state building and task_state spawning. [ 1329.171189] env[62508]: DEBUG nova.compute.manager [req-5042d1d5-8a0b-423e-bf0e-9ccf84b7b858 req-ab5cc928-ea61-42e0-bfdb-9669f9735d11 service nova] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Received event network-changed-b7ef30a6-939d-4546-9597-db2b4cc755ae {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1329.171285] env[62508]: DEBUG nova.compute.manager [req-5042d1d5-8a0b-423e-bf0e-9ccf84b7b858 req-ab5cc928-ea61-42e0-bfdb-9669f9735d11 service nova] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Refreshing instance network info cache due to event network-changed-b7ef30a6-939d-4546-9597-db2b4cc755ae. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1329.171431] env[62508]: DEBUG oslo_concurrency.lockutils [req-5042d1d5-8a0b-423e-bf0e-9ccf84b7b858 req-ab5cc928-ea61-42e0-bfdb-9669f9735d11 service nova] Acquiring lock "refresh_cache-b911f25d-711b-411e-bb2d-2e59386ff2ea" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1329.180600] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Waiting for the task: (returnval){ [ 1329.180600] env[62508]: value = "task-1775425" [ 1329.180600] env[62508]: _type = "Task" [ 1329.180600] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.189384] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': task-1775425, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.266179] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquiring lock "84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1329.266179] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1329.304464] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775424, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07337} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.304464] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1329.304754] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-068475bf-0234-4715-aa0d-0da629cac743 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.328729] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 03552483-a365-4d25-94bc-ea9b38ee6cd6/03552483-a365-4d25-94bc-ea9b38ee6cd6.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1329.335051] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36d43ce9-1e96-46c7-ba69-fc414653c714 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.354736] env[62508]: DEBUG nova.network.neutron [req-56eda7e2-f622-473c-99a6-2b5a99dc0f3a req-3dde777b-8d4a-4f4f-83e6-d9caee3e11f7 service nova] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Updated VIF entry in instance network info cache for port 87421d0c-fb71-4543-be75-596ccb1584a9. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1329.354736] env[62508]: DEBUG nova.network.neutron [req-56eda7e2-f622-473c-99a6-2b5a99dc0f3a req-3dde777b-8d4a-4f4f-83e6-d9caee3e11f7 service nova] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Updating instance_info_cache with network_info: [{"id": "87421d0c-fb71-4543-be75-596ccb1584a9", "address": "fa:16:3e:0c:7f:32", "network": {"id": "5e2ac7df-b6ed-4cab-8db2-6da4e3cc5fc4", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-966305863-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4516318f4ea84bf2bcc39bd70cdca54a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8d3908ba-a3dc-4f88-988b-f997cf7257e2", "external-id": "nsx-vlan-transportzone-116", "segmentation_id": 116, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87421d0c-fb", "ovs_interfaceid": "87421d0c-fb71-4543-be75-596ccb1584a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1329.360644] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1329.360644] env[62508]: value = "task-1775426" [ 1329.360644] env[62508]: _type = "Task" [ 1329.360644] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.371010] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775426, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.512897] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7568f464-ee45-4ad0-a7aa-6ea3380a7280 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.520713] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5dd33af-1426-4d6b-866d-7422622e99a3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.556022] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da21f1e5-d440-425d-bcea-c78175552642 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.556985] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "192995e7-82f5-41be-990d-d91b93f981e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.260s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1329.563071] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435b6620-7b6f-4293-b585-5f4360386049 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.577252] env[62508]: DEBUG nova.compute.provider_tree [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1329.581865] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Releasing lock "refresh_cache-b911f25d-711b-411e-bb2d-2e59386ff2ea" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.581865] env[62508]: DEBUG nova.compute.manager [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Instance network_info: |[{"id": "b7ef30a6-939d-4546-9597-db2b4cc755ae", "address": "fa:16:3e:d4:1b:46", "network": {"id": "2e10157c-8592-41c4-8540-9696923dc9ca", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1194878546-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "51842f3e9f83452789923afbafd40bc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "9f52a458-d157-48a3-b4e2-b8cc0779afe2", "external-id": "nsx-vlan-transportzone-403", "segmentation_id": 403, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7ef30a6-93", "ovs_interfaceid": "b7ef30a6-939d-4546-9597-db2b4cc755ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1329.582768] env[62508]: DEBUG oslo_concurrency.lockutils [req-5042d1d5-8a0b-423e-bf0e-9ccf84b7b858 req-ab5cc928-ea61-42e0-bfdb-9669f9735d11 service nova] Acquired lock "refresh_cache-b911f25d-711b-411e-bb2d-2e59386ff2ea" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.582768] env[62508]: DEBUG nova.network.neutron [req-5042d1d5-8a0b-423e-bf0e-9ccf84b7b858 req-ab5cc928-ea61-42e0-bfdb-9669f9735d11 service nova] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Refreshing network info cache for port b7ef30a6-939d-4546-9597-db2b4cc755ae {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1329.583488] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:1b:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f52a458-d157-48a3-b4e2-b8cc0779afe2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b7ef30a6-939d-4546-9597-db2b4cc755ae', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1329.591381] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Creating folder: Project (51842f3e9f83452789923afbafd40bc4). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1329.592662] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-653ce377-f657-455e-b1fd-917bc9389b6c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.602953] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Created folder: Project (51842f3e9f83452789923afbafd40bc4) in parent group-v368536. [ 1329.603170] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Creating folder: Instances. Parent ref: group-v368583. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1329.603414] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e4b76d9e-5e41-4697-9129-1fe553df23d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.613326] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Created folder: Instances in parent group-v368583. [ 1329.613575] env[62508]: DEBUG oslo.service.loopingcall [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1329.613766] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1329.613974] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5f7cf030-a99d-4006-97d0-7bd3bf125665 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.633557] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1329.633557] env[62508]: value = "task-1775429" [ 1329.633557] env[62508]: _type = "Task" [ 1329.633557] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.641113] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775429, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.689875] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': task-1775425, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.857561] env[62508]: DEBUG oslo_concurrency.lockutils [req-56eda7e2-f622-473c-99a6-2b5a99dc0f3a req-3dde777b-8d4a-4f4f-83e6-d9caee3e11f7 service nova] Releasing lock "refresh_cache-7d23d8f0-d7a9-4236-ad28-208e77b72138" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.871720] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775426, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.060684] env[62508]: DEBUG nova.compute.manager [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1330.081028] env[62508]: DEBUG nova.scheduler.client.report [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1330.146645] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775429, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.194620] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': task-1775425, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.969996} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.194929] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 7d23d8f0-d7a9-4236-ad28-208e77b72138/7d23d8f0-d7a9-4236-ad28-208e77b72138.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1330.195185] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1330.195385] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f5661afb-32b3-4f9a-a340-3667df771dd8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.202440] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Waiting for the task: (returnval){ [ 1330.202440] env[62508]: value = "task-1775430" [ 1330.202440] env[62508]: _type = "Task" [ 1330.202440] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.213023] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': task-1775430, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.373910] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775426, 'name': ReconfigVM_Task, 'duration_secs': 1.012549} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.377462] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 03552483-a365-4d25-94bc-ea9b38ee6cd6/03552483-a365-4d25-94bc-ea9b38ee6cd6.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1330.377462] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-291a8944-bf67-4ac0-b053-ff45fbb62fba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.382053] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1330.382053] env[62508]: value = "task-1775431" [ 1330.382053] env[62508]: _type = "Task" [ 1330.382053] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.390115] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775431, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.416446] env[62508]: DEBUG nova.network.neutron [req-5042d1d5-8a0b-423e-bf0e-9ccf84b7b858 req-ab5cc928-ea61-42e0-bfdb-9669f9735d11 service nova] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Updated VIF entry in instance network info cache for port b7ef30a6-939d-4546-9597-db2b4cc755ae. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1330.416792] env[62508]: DEBUG nova.network.neutron [req-5042d1d5-8a0b-423e-bf0e-9ccf84b7b858 req-ab5cc928-ea61-42e0-bfdb-9669f9735d11 service nova] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Updating instance_info_cache with network_info: [{"id": "b7ef30a6-939d-4546-9597-db2b4cc755ae", "address": "fa:16:3e:d4:1b:46", "network": {"id": "2e10157c-8592-41c4-8540-9696923dc9ca", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1194878546-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "51842f3e9f83452789923afbafd40bc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f52a458-d157-48a3-b4e2-b8cc0779afe2", "external-id": "nsx-vlan-transportzone-403", "segmentation_id": 403, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7ef30a6-93", "ovs_interfaceid": "b7ef30a6-939d-4546-9597-db2b4cc755ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1330.559072] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "95a289ac-3178-45ea-80d2-905b9af54f3c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.559410] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "95a289ac-3178-45ea-80d2-905b9af54f3c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.582989] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.586723] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.993s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1330.587299] env[62508]: DEBUG nova.compute.manager [None 
req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1330.590140] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.285s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.590552] env[62508]: DEBUG nova.objects.instance [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Lazy-loading 'resources' on Instance uuid b182d3aa-a4de-4879-ab36-2cb51472158a {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1330.646508] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775429, 'name': CreateVM_Task, 'duration_secs': 0.572021} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.646701] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1330.647418] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1330.648232] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.648232] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1330.648232] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fee93cfd-a3c6-4fc2-ae88-cf5b608e64ac {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.653366] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1330.653366] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528733b2-eb95-b182-1f21-1f3eb9b794d3" [ 
1330.653366] env[62508]: _type = "Task" [ 1330.653366] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.661987] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528733b2-eb95-b182-1f21-1f3eb9b794d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.712141] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': task-1775430, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.143795} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.712385] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1330.713175] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b093f12-ff8e-45c7-a9b3-68511bd101ae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.735893] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 7d23d8f0-d7a9-4236-ad28-208e77b72138/7d23d8f0-d7a9-4236-ad28-208e77b72138.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1330.736221] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2dac254b-0ebb-4e45-b999-c2f10b566e08 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.758661] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Waiting for the task: (returnval){ [ 1330.758661] env[62508]: value = "task-1775432" [ 1330.758661] env[62508]: _type = "Task" [ 1330.758661] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.767325] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': task-1775432, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.892025] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775431, 'name': Rename_Task, 'duration_secs': 0.236039} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.892798] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1330.892798] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-36de4672-574a-4853-89aa-4cc17047e0aa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.902044] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1330.902044] env[62508]: value = "task-1775433" [ 1330.902044] env[62508]: _type = "Task" [ 1330.902044] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.910011] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775433, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.919778] env[62508]: DEBUG oslo_concurrency.lockutils [req-5042d1d5-8a0b-423e-bf0e-9ccf84b7b858 req-ab5cc928-ea61-42e0-bfdb-9669f9735d11 service nova] Releasing lock "refresh_cache-b911f25d-711b-411e-bb2d-2e59386ff2ea" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.093154] env[62508]: DEBUG nova.compute.utils [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1331.094686] env[62508]: DEBUG nova.compute.manager [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1331.094862] env[62508]: DEBUG nova.network.neutron [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1331.167213] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528733b2-eb95-b182-1f21-1f3eb9b794d3, 'name': SearchDatastore_Task, 'duration_secs': 0.010882} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.169017] env[62508]: DEBUG nova.policy [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ce1e68b6f3ce46f28171c11444a66ad9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5dcb5fd2552e42188651162384519043', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1331.170676] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.170924] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1331.171174] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1331.171338] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.171519] env[62508]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1331.173036] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-574248fc-331c-46b2-97c3-0bb20962f7a5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.183071] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1331.183265] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1331.183995] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96428e9d-6d94-48ea-8509-a202f4d4250c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.190365] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1331.190365] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5260e398-4274-0e59-3134-1cb2606d52a6" [ 1331.190365] env[62508]: _type = "Task" [ 1331.190365] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.201971] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5260e398-4274-0e59-3134-1cb2606d52a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.271572] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': task-1775432, 'name': ReconfigVM_Task, 'duration_secs': 0.280743} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.271918] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 7d23d8f0-d7a9-4236-ad28-208e77b72138/7d23d8f0-d7a9-4236-ad28-208e77b72138.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1331.272474] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-69d69058-1d1d-4a90-b217-d7c611dc9e7c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.282356] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Waiting for the task: (returnval){ [ 1331.282356] env[62508]: value = "task-1775434" [ 1331.282356] env[62508]: _type = "Task" [ 1331.282356] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.291564] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': task-1775434, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.413828] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775433, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.548201] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3cf9b42-935a-498e-8016-808cf22bf3cd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.556021] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e30c0ef-189f-4e51-bd63-72083519b307 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.586834] env[62508]: DEBUG nova.network.neutron [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Successfully created port: 57481a2e-e2c5-417e-abe4-7a7a5562ff08 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1331.589197] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a830cacf-73df-4cb6-8c7a-f06f3d31c705 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.599435] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3931b652-2a45-4428-bd40-3686a1d370dd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.603505] env[62508]: DEBUG nova.compute.manager [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1331.616763] env[62508]: DEBUG nova.compute.provider_tree [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1331.703523] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5260e398-4274-0e59-3134-1cb2606d52a6, 'name': SearchDatastore_Task, 'duration_secs': 0.034264} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.704318] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a47cde5d-eae0-4bfa-a772-3169a2aec4fa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.709571] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1331.709571] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f4a75c-f27d-446d-54e4-dccbf58fbb3e" [ 1331.709571] env[62508]: _type = "Task" [ 1331.709571] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.719030] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f4a75c-f27d-446d-54e4-dccbf58fbb3e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.795201] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': task-1775434, 'name': Rename_Task, 'duration_secs': 0.133482} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.795201] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1331.795201] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-74e1ff6b-abb5-44ae-83b7-55f88010469e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.802026] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Waiting for the task: (returnval){ [ 1331.802026] env[62508]: value = "task-1775435" [ 1331.802026] env[62508]: _type = "Task" [ 1331.802026] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.807758] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': task-1775435, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.914691] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775433, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.119838] env[62508]: DEBUG nova.scheduler.client.report [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1332.220567] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f4a75c-f27d-446d-54e4-dccbf58fbb3e, 'name': SearchDatastore_Task, 'duration_secs': 0.054924} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.220728] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1332.220920] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] b911f25d-711b-411e-bb2d-2e59386ff2ea/b911f25d-711b-411e-bb2d-2e59386ff2ea.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1332.221194] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0211786f-975d-41ec-8b2a-c232b5ae4a86 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.227903] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1332.227903] env[62508]: value = "task-1775436" [ 1332.227903] env[62508]: _type = "Task" [ 1332.227903] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.235777] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775436, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.310649] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': task-1775435, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.414164] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775433, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.613276] env[62508]: DEBUG nova.compute.manager [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1332.625419] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.035s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.629452] env[62508]: DEBUG oslo_concurrency.lockutils [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.812s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.630010] env[62508]: DEBUG nova.objects.instance [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Lazy-loading 'resources' on Instance uuid 827b0887-2132-49af-bcce-cedc7237245d {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1332.650647] env[62508]: DEBUG nova.virt.hardware [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1332.650946] env[62508]: DEBUG nova.virt.hardware [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1332.651194] env[62508]: DEBUG nova.virt.hardware [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1332.651541] env[62508]: DEBUG nova.virt.hardware [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1332.651541] env[62508]: DEBUG nova.virt.hardware [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1332.651661] env[62508]: DEBUG nova.virt.hardware [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1332.652187] env[62508]: DEBUG nova.virt.hardware [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1332.652187] env[62508]: DEBUG nova.virt.hardware [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1332.652367] env[62508]: DEBUG nova.virt.hardware [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1332.652614] env[62508]: DEBUG nova.virt.hardware [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1332.652833] env[62508]: DEBUG nova.virt.hardware [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 
tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1332.654817] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e3f27c-f7a9-4664-aeba-91f73d85b22f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.666113] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-748f2734-8ddb-4d57-9ace-1ba6af2691dc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.671398] env[62508]: INFO nova.scheduler.client.report [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Deleted allocations for instance b182d3aa-a4de-4879-ab36-2cb51472158a [ 1332.741196] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775436, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.810667] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': task-1775435, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.915099] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775433, 'name': PowerOnVM_Task} progress is 81%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.206029] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dabe1834-1192-4e52-9b8a-fc34d5505558 tempest-TenantUsagesTestJSON-1283949153 tempest-TenantUsagesTestJSON-1283949153-project-member] Lock "b182d3aa-a4de-4879-ab36-2cb51472158a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.322s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1333.243042] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775436, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.51514} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.245840] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] b911f25d-711b-411e-bb2d-2e59386ff2ea/b911f25d-711b-411e-bb2d-2e59386ff2ea.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1333.248630] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1333.248630] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-61d065e0-61b4-4832-a45e-3611267d9969 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.254672] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1333.254672] env[62508]: value = "task-1775437" [ 1333.254672] env[62508]: _type = "Task" [ 1333.254672] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.267811] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775437, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.314962] env[62508]: DEBUG oslo_vmware.api [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': task-1775435, 'name': PowerOnVM_Task, 'duration_secs': 1.268332} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.315315] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1333.315558] env[62508]: INFO nova.compute.manager [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Took 9.39 seconds to spawn the instance on the hypervisor. 
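The entries above trace the vmwareapi spawn pipeline for these instances (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), each polled by wait_for_task/_poll_task and closed with a "duration_secs ... completed successfully" record. As a minimal, hypothetical aid for reading an excerpt like this one (not part of Nova or oslo.vmware), a short parser keyed to that exact completion format could tabulate per-task durations:

import re
from collections import defaultdict

# Matches the oslo_vmware _poll_task completion lines seen in this log, e.g.:
#   Task: {'id': task-1775435, 'name': PowerOnVM_Task, 'duration_secs': 1.268332} completed successfully.
TASK_DONE = re.compile(
    r"Task: \{'id': (?P<task_id>[^,]+), "
    r"'name': (?P<name>\w+), "
    r"'duration_secs': (?P<secs>[0-9.]+)\} completed successfully"
)

def task_durations(log_lines):
    """Return {task_name: [durations_in_seconds, ...]} for completed VMware tasks."""
    durations = defaultdict(list)
    for line in log_lines:
        m = TASK_DONE.search(line)
        if m:
            durations[m.group("name")].append(float(m.group("secs")))
    return durations

if __name__ == "__main__":
    # Hypothetical usage against one line in the format shown above.
    sample = [
        "[ 1333.315 ] env[62508]: DEBUG oslo_vmware.api [None req-...] "
        "Task: {'id': task-1775435, 'name': PowerOnVM_Task, "
        "'duration_secs': 1.268332} completed successfully."
    ]
    for name, secs in task_durations(sample).items():
        print(f"{name}: n={len(secs)} total={sum(secs):.3f}s")

This is a log-analysis sketch only; the regex assumes the quoting and key order produced by the _poll_task lines quoted above and would need adjusting for other formats.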
[ 1333.315778] env[62508]: DEBUG nova.compute.manager [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1333.316761] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa474e7-0c78-4c02-a876-85aeddefd987 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.416722] env[62508]: DEBUG oslo_vmware.api [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775433, 'name': PowerOnVM_Task, 'duration_secs': 2.06731} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.416999] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1333.417217] env[62508]: INFO nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Took 12.13 seconds to spawn the instance on the hypervisor. [ 1333.417397] env[62508]: DEBUG nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1333.418219] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ac12d8-582d-46e6-ba37-a80025bf4b28 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.635986] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17fbbda1-ee4f-4b6e-9612-bcbb64cc0089 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.650791] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a84f89-99fe-4e36-89b8-ad1b9c9528df {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.683992] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44dcebaa-85f8-4056-a841-1c7937814549 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.691671] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ca47cf-8780-4fb9-984f-b7268567a9e9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.706425] env[62508]: DEBUG nova.compute.provider_tree [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab 
tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1333.765160] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775437, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.488406} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.765160] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1333.765938] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f08d9b3-3aaf-4aa8-8658-ea4fdfa84817 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.788130] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] b911f25d-711b-411e-bb2d-2e59386ff2ea/b911f25d-711b-411e-bb2d-2e59386ff2ea.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1333.788761] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-269ff86d-44f7-4cf9-a718-37299e29cb8d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.808793] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1333.808793] env[62508]: value = "task-1775438" [ 1333.808793] env[62508]: _type = "Task" [ 1333.808793] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.809645] env[62508]: DEBUG nova.network.neutron [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Successfully updated port: 57481a2e-e2c5-417e-abe4-7a7a5562ff08 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1333.821360] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775438, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.841639] env[62508]: INFO nova.compute.manager [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Took 33.35 seconds to build instance. [ 1333.890241] env[62508]: DEBUG nova.compute.manager [req-c376df74-f109-4f6f-8925-e72a9ea629ab req-a8597662-36d1-4829-aab6-98699191982f service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Received event network-vif-plugged-57481a2e-e2c5-417e-abe4-7a7a5562ff08 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1333.890539] env[62508]: DEBUG oslo_concurrency.lockutils [req-c376df74-f109-4f6f-8925-e72a9ea629ab req-a8597662-36d1-4829-aab6-98699191982f service nova] Acquiring lock "ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.890807] env[62508]: DEBUG oslo_concurrency.lockutils [req-c376df74-f109-4f6f-8925-e72a9ea629ab req-a8597662-36d1-4829-aab6-98699191982f service nova] Lock "ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1333.890965] env[62508]: DEBUG oslo_concurrency.lockutils [req-c376df74-f109-4f6f-8925-e72a9ea629ab req-a8597662-36d1-4829-aab6-98699191982f service nova] Lock "ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1333.891090] env[62508]: DEBUG nova.compute.manager [req-c376df74-f109-4f6f-8925-e72a9ea629ab req-a8597662-36d1-4829-aab6-98699191982f service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] No waiting events found dispatching network-vif-plugged-57481a2e-e2c5-417e-abe4-7a7a5562ff08 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1333.891309] env[62508]: WARNING nova.compute.manager [req-c376df74-f109-4f6f-8925-e72a9ea629ab req-a8597662-36d1-4829-aab6-98699191982f service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Received unexpected event network-vif-plugged-57481a2e-e2c5-417e-abe4-7a7a5562ff08 for instance with vm_state building and task_state spawning. [ 1333.945296] env[62508]: INFO nova.compute.manager [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Took 34.63 seconds to build instance. 
[ 1334.216038] env[62508]: DEBUG nova.scheduler.client.report [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1334.318742] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Acquiring lock "refresh_cache-ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1334.318742] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Acquired lock "refresh_cache-ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.318742] env[62508]: DEBUG nova.network.neutron [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1334.328682] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775438, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.341013] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3842f6bd-7ae5-4bf6-840f-461b45131e60 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Lock "7d23d8f0-d7a9-4236-ad28-208e77b72138" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.160s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.448446] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0ff393c5-8921-4f27-9e77-978d0616f2c3 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "03552483-a365-4d25-94bc-ea9b38ee6cd6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.088s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.719617] env[62508]: DEBUG oslo_concurrency.lockutils [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.090s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.724111] env[62508]: DEBUG oslo_concurrency.lockutils [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.015s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1334.724228] env[62508]: INFO nova.compute.claims [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1334.754118] env[62508]: INFO nova.scheduler.client.report [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Deleted allocations for instance 827b0887-2132-49af-bcce-cedc7237245d [ 1334.823510] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775438, 'name': ReconfigVM_Task, 'duration_secs': 0.56953} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.823784] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Reconfigured VM instance instance-00000011 to attach disk [datastore1] b911f25d-711b-411e-bb2d-2e59386ff2ea/b911f25d-711b-411e-bb2d-2e59386ff2ea.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1334.824426] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ea5cb15-cbc5-4cca-887d-5ce423a4e54a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.833503] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1334.833503] env[62508]: value = "task-1775439" [ 1334.833503] env[62508]: _type = "Task" [ 1334.833503] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.842609] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775439, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.844031] env[62508]: DEBUG nova.compute.manager [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1334.909864] env[62508]: DEBUG nova.network.neutron [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1334.950949] env[62508]: DEBUG nova.compute.manager [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1334.994394] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1335.265330] env[62508]: DEBUG oslo_concurrency.lockutils [None req-56392ba0-f2fb-4a04-b18f-b79fb2bcd7ab tempest-InstanceActionsNegativeTestJSON-2065397974 tempest-InstanceActionsNegativeTestJSON-2065397974-project-member] Lock "827b0887-2132-49af-bcce-cedc7237245d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.728s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1335.345966] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775439, 'name': Rename_Task, 'duration_secs': 0.175966} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.346281] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1335.346518] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f30b0d6c-263a-422d-8053-29aa2791dab0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.356147] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1335.356147] env[62508]: value = "task-1775440" [ 1335.356147] env[62508]: _type = "Task" [ 1335.356147] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.366500] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775440, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.372027] env[62508]: DEBUG nova.network.neutron [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Updating instance_info_cache with network_info: [{"id": "57481a2e-e2c5-417e-abe4-7a7a5562ff08", "address": "fa:16:3e:58:bc:a4", "network": {"id": "7f060c55-84d7-4d5f-bbf9-b1153e0421dd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-523269330-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dcb5fd2552e42188651162384519043", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57481a2e-e2", "ovs_interfaceid": "57481a2e-e2c5-417e-abe4-7a7a5562ff08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.372702] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.470657] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.498234] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.785109] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "575ea3dc-850d-4078-8678-41b3c40a4c27" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.786307] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock 
"575ea3dc-850d-4078-8678-41b3c40a4c27" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1335.786307] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "575ea3dc-850d-4078-8678-41b3c40a4c27-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.786307] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "575ea3dc-850d-4078-8678-41b3c40a4c27-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1335.786307] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "575ea3dc-850d-4078-8678-41b3c40a4c27-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1335.789177] env[62508]: INFO nova.compute.manager [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Terminating instance [ 1335.793144] env[62508]: DEBUG nova.compute.manager [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1335.793144] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1335.793144] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7e2ca4-7045-4ffc-a84f-a89f6a4b8cd7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.801232] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1335.802738] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68713470-35c6-4a5d-b2ab-f82dd7bb51f3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.815774] env[62508]: DEBUG oslo_vmware.api [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1335.815774] env[62508]: value = "task-1775441" [ 1335.815774] env[62508]: _type = "Task" [ 1335.815774] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.826506] env[62508]: DEBUG oslo_vmware.api [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775441, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.866941] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775440, 'name': PowerOnVM_Task} progress is 92%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.875241] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Releasing lock "refresh_cache-ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1335.875241] env[62508]: DEBUG nova.compute.manager [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Instance network_info: |[{"id": "57481a2e-e2c5-417e-abe4-7a7a5562ff08", "address": "fa:16:3e:58:bc:a4", "network": {"id": "7f060c55-84d7-4d5f-bbf9-b1153e0421dd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-523269330-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dcb5fd2552e42188651162384519043", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57481a2e-e2", "ovs_interfaceid": "57481a2e-e2c5-417e-abe4-7a7a5562ff08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1335.875919] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:bc:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '57481a2e-e2c5-417e-abe4-7a7a5562ff08', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1335.884848] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Creating folder: Project (5dcb5fd2552e42188651162384519043). Parent ref: group-v368536. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1335.885204] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ed35032d-fe5c-40a8-bb40-3d80924eef9a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.897314] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Created folder: Project (5dcb5fd2552e42188651162384519043) in parent group-v368536. [ 1335.897524] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Creating folder: Instances. Parent ref: group-v368586. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1335.897846] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f2d40ebe-272d-4d9d-b383-9708a8e77c7c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.910902] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Created folder: Instances in parent group-v368586. [ 1335.911233] env[62508]: DEBUG oslo.service.loopingcall [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1335.911453] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1335.911700] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-04f8cb01-438c-4a88-98a4-3d9296e7023c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.942564] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1335.942564] env[62508]: value = "task-1775444" [ 1335.942564] env[62508]: _type = "Task" [ 1335.942564] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.956312] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775444, 'name': CreateVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.330957] env[62508]: DEBUG oslo_vmware.api [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775441, 'name': PowerOffVM_Task, 'duration_secs': 0.22016} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.331044] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1336.331269] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1336.331596] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26da825f-c88b-4835-990d-0ab3c006c12c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.371560] env[62508]: DEBUG oslo_vmware.api [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775440, 'name': PowerOnVM_Task, 'duration_secs': 0.603772} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.374933] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1336.375250] env[62508]: INFO nova.compute.manager [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Took 9.78 seconds to spawn the instance on the hypervisor. 
[ 1336.375365] env[62508]: DEBUG nova.compute.manager [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1336.376732] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bff527c-b116-4813-8d1f-27dac7fea99c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.415717] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448cb6f6-2fb7-40cf-8b3b-d5d36655a4d7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.419501] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1336.419714] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1336.419898] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Deleting the datastore file [datastore1] 575ea3dc-850d-4078-8678-41b3c40a4c27 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1336.420581] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-12daab1a-4edb-442b-9e1b-cadd77f6ff5d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.426298] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c27dca7-19cc-4b9c-b294-34e6af55622c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.431322] env[62508]: DEBUG oslo_vmware.api [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1336.431322] env[62508]: value = "task-1775446" [ 1336.431322] env[62508]: _type = "Task" [ 1336.431322] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.465426] env[62508]: DEBUG nova.compute.manager [req-d8190d16-7546-4733-99ac-4d5a6c815f76 req-b8f6ce55-334a-42d3-a9c2-4bb95bc9fb45 service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Received event network-changed-57481a2e-e2c5-417e-abe4-7a7a5562ff08 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1336.465623] env[62508]: DEBUG nova.compute.manager [req-d8190d16-7546-4733-99ac-4d5a6c815f76 req-b8f6ce55-334a-42d3-a9c2-4bb95bc9fb45 service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Refreshing instance network info cache due to event network-changed-57481a2e-e2c5-417e-abe4-7a7a5562ff08. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1336.465839] env[62508]: DEBUG oslo_concurrency.lockutils [req-d8190d16-7546-4733-99ac-4d5a6c815f76 req-b8f6ce55-334a-42d3-a9c2-4bb95bc9fb45 service nova] Acquiring lock "refresh_cache-ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1336.466371] env[62508]: DEBUG oslo_concurrency.lockutils [req-d8190d16-7546-4733-99ac-4d5a6c815f76 req-b8f6ce55-334a-42d3-a9c2-4bb95bc9fb45 service nova] Acquired lock "refresh_cache-ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.466371] env[62508]: DEBUG nova.network.neutron [req-d8190d16-7546-4733-99ac-4d5a6c815f76 req-b8f6ce55-334a-42d3-a9c2-4bb95bc9fb45 service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Refreshing network info cache for port 57481a2e-e2c5-417e-abe4-7a7a5562ff08 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1336.473075] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41121879-d631-4ea7-bced-1899fe4b4817 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.479578] env[62508]: DEBUG oslo_vmware.api [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775446, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.485109] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775444, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.488149] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5134e52e-fe1a-4f86-bfb8-58fd47382abe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.504896] env[62508]: DEBUG nova.compute.provider_tree [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1336.829070] env[62508]: DEBUG nova.compute.manager [req-b5e1c478-20ad-40ec-bf74-58639e9c6b61 req-12eb7f54-dfb8-489f-b20b-779646381563 service nova] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Received event network-changed-87421d0c-fb71-4543-be75-596ccb1584a9 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1336.829444] env[62508]: DEBUG nova.compute.manager [req-b5e1c478-20ad-40ec-bf74-58639e9c6b61 req-12eb7f54-dfb8-489f-b20b-779646381563 service nova] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Refreshing instance network info cache due to event network-changed-87421d0c-fb71-4543-be75-596ccb1584a9. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1336.830267] env[62508]: DEBUG oslo_concurrency.lockutils [req-b5e1c478-20ad-40ec-bf74-58639e9c6b61 req-12eb7f54-dfb8-489f-b20b-779646381563 service nova] Acquiring lock "refresh_cache-7d23d8f0-d7a9-4236-ad28-208e77b72138" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1336.830494] env[62508]: DEBUG oslo_concurrency.lockutils [req-b5e1c478-20ad-40ec-bf74-58639e9c6b61 req-12eb7f54-dfb8-489f-b20b-779646381563 service nova] Acquired lock "refresh_cache-7d23d8f0-d7a9-4236-ad28-208e77b72138" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.830757] env[62508]: DEBUG nova.network.neutron [req-b5e1c478-20ad-40ec-bf74-58639e9c6b61 req-12eb7f54-dfb8-489f-b20b-779646381563 service nova] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Refreshing network info cache for port 87421d0c-fb71-4543-be75-596ccb1584a9 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1336.904932] env[62508]: INFO nova.compute.manager [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Took 35.30 seconds to build instance. [ 1336.944154] env[62508]: DEBUG oslo_vmware.api [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775446, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.983755] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775444, 'name': CreateVM_Task, 'duration_secs': 0.986045} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.983940] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1336.984627] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1336.984794] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.985127] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1336.985377] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b130534-674b-4538-8361-7085c93352a8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.991130] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Waiting for the task: (returnval){ [ 1336.991130] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529e1fda-945a-6b5f-10bf-aedeb912e28c" [ 1336.991130] env[62508]: _type = "Task" [ 1336.991130] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.003727] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529e1fda-945a-6b5f-10bf-aedeb912e28c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.008808] env[62508]: DEBUG nova.scheduler.client.report [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1337.410644] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8f754ebe-9bea-4171-b7b9-dbd38d70dcf7 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lock "b911f25d-711b-411e-bb2d-2e59386ff2ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.824s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.443812] env[62508]: DEBUG oslo_vmware.api [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775446, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.575787} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.447021] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1337.447021] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1337.447021] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1337.447021] env[62508]: INFO nova.compute.manager [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1337.447021] env[62508]: DEBUG oslo.service.loopingcall [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1337.447252] env[62508]: DEBUG nova.compute.manager [-] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1337.447252] env[62508]: DEBUG nova.network.neutron [-] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1337.503561] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529e1fda-945a-6b5f-10bf-aedeb912e28c, 'name': SearchDatastore_Task, 'duration_secs': 0.021681} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.503981] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1337.504321] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1337.504612] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1337.505180] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1337.505450] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1337.505770] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-56cd36d3-ed02-428b-8e9e-6ab7f2b7dec5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.517204] env[62508]: DEBUG 
oslo_concurrency.lockutils [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.795s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.517801] env[62508]: DEBUG nova.compute.manager [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1337.523270] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.282s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.523502] env[62508]: DEBUG nova.objects.instance [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Lazy-loading 'resources' on Instance uuid 891fac54-2ec4-4d47-8535-a33bd9dfb804 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1337.524950] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1337.529024] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1337.529024] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b687b25-74e3-40e9-bba2-d548ffdba69d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.533815] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Waiting for the task: (returnval){ [ 1337.533815] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5235de27-d617-9643-a0cd-e00f1068a93a" [ 1337.533815] env[62508]: _type = "Task" [ 1337.533815] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.543521] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5235de27-d617-9643-a0cd-e00f1068a93a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.609895] env[62508]: DEBUG nova.network.neutron [req-d8190d16-7546-4733-99ac-4d5a6c815f76 req-b8f6ce55-334a-42d3-a9c2-4bb95bc9fb45 service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Updated VIF entry in instance network info cache for port 57481a2e-e2c5-417e-abe4-7a7a5562ff08. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1337.610498] env[62508]: DEBUG nova.network.neutron [req-d8190d16-7546-4733-99ac-4d5a6c815f76 req-b8f6ce55-334a-42d3-a9c2-4bb95bc9fb45 service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Updating instance_info_cache with network_info: [{"id": "57481a2e-e2c5-417e-abe4-7a7a5562ff08", "address": "fa:16:3e:58:bc:a4", "network": {"id": "7f060c55-84d7-4d5f-bbf9-b1153e0421dd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-523269330-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dcb5fd2552e42188651162384519043", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57481a2e-e2", "ovs_interfaceid": "57481a2e-e2c5-417e-abe4-7a7a5562ff08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1337.922637] env[62508]: DEBUG nova.compute.manager [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1338.037907] env[62508]: DEBUG nova.compute.utils [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1338.039059] env[62508]: DEBUG nova.compute.manager [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1338.039176] env[62508]: DEBUG nova.network.neutron [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1338.052261] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5235de27-d617-9643-a0cd-e00f1068a93a, 'name': SearchDatastore_Task, 'duration_secs': 0.029889} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.053847] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c4be6d8-772e-4d29-9754-b6f39c6c4a4b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.060226] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Waiting for the task: (returnval){ [ 1338.060226] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b92a46-a001-3a4f-b4ae-9e7e937d1865" [ 1338.060226] env[62508]: _type = "Task" [ 1338.060226] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.069248] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b92a46-a001-3a4f-b4ae-9e7e937d1865, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.115030] env[62508]: DEBUG oslo_concurrency.lockutils [req-d8190d16-7546-4733-99ac-4d5a6c815f76 req-b8f6ce55-334a-42d3-a9c2-4bb95bc9fb45 service nova] Releasing lock "refresh_cache-ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1338.128785] env[62508]: DEBUG nova.policy [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '285fedd2e7fd4d259ca7fc57c3fcbf46', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74c45615efbb425fbec8400f6d225892', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1338.444758] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.543673] env[62508]: DEBUG nova.compute.manager [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1338.560190] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d62fbb0-79e1-4a72-8685-94856904633c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.574065] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37299143-e61a-4d00-a688-1f235ac0d837 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.577740] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b92a46-a001-3a4f-b4ae-9e7e937d1865, 'name': SearchDatastore_Task, 'duration_secs': 0.024011} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.578843] env[62508]: DEBUG nova.network.neutron [req-b5e1c478-20ad-40ec-bf74-58639e9c6b61 req-12eb7f54-dfb8-489f-b20b-779646381563 service nova] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Updated VIF entry in instance network info cache for port 87421d0c-fb71-4543-be75-596ccb1584a9. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1338.579356] env[62508]: DEBUG nova.network.neutron [req-b5e1c478-20ad-40ec-bf74-58639e9c6b61 req-12eb7f54-dfb8-489f-b20b-779646381563 service nova] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Updating instance_info_cache with network_info: [{"id": "87421d0c-fb71-4543-be75-596ccb1584a9", "address": "fa:16:3e:0c:7f:32", "network": {"id": "5e2ac7df-b6ed-4cab-8db2-6da4e3cc5fc4", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-966305863-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4516318f4ea84bf2bcc39bd70cdca54a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8d3908ba-a3dc-4f88-988b-f997cf7257e2", "external-id": "nsx-vlan-transportzone-116", "segmentation_id": 116, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87421d0c-fb", "ovs_interfaceid": "87421d0c-fb71-4543-be75-596ccb1584a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1338.582563] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1338.582966] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd/ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1338.584564] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-12d66bfd-8cec-406e-951f-489be8c97315 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.618686] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18be8529-7db7-4718-8656-43fe3cb10d24 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.622701] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Waiting for the task: (returnval){ [ 1338.622701] 
env[62508]: value = "task-1775447" [ 1338.622701] env[62508]: _type = "Task" [ 1338.622701] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.631102] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f36e54a3-ddc1-4c22-a496-449fd7a43969 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.638922] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': task-1775447, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.650858] env[62508]: DEBUG nova.compute.provider_tree [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1338.875388] env[62508]: DEBUG nova.network.neutron [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Successfully created port: d8028a3e-f50d-41fa-b065-a2babc831eec {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1338.909217] env[62508]: DEBUG nova.compute.manager [req-43d10b8f-93d4-4a44-a691-f415784819a9 req-089e7132-e2c8-4177-bb9c-cd01515e02f5 service nova] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Received event network-vif-deleted-a4972a5c-7644-49db-a0bb-51bc62415b49 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1338.909466] env[62508]: INFO nova.compute.manager [req-43d10b8f-93d4-4a44-a691-f415784819a9 req-089e7132-e2c8-4177-bb9c-cd01515e02f5 service nova] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Neutron deleted interface a4972a5c-7644-49db-a0bb-51bc62415b49; detaching it from the instance and deleting it from the info cache [ 1338.909642] env[62508]: DEBUG nova.network.neutron [req-43d10b8f-93d4-4a44-a691-f415784819a9 req-089e7132-e2c8-4177-bb9c-cd01515e02f5 service nova] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1339.084366] env[62508]: DEBUG nova.network.neutron [-] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1339.088079] env[62508]: DEBUG oslo_concurrency.lockutils [req-b5e1c478-20ad-40ec-bf74-58639e9c6b61 req-12eb7f54-dfb8-489f-b20b-779646381563 service nova] Releasing lock "refresh_cache-7d23d8f0-d7a9-4236-ad28-208e77b72138" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1339.139092] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': task-1775447, 'name': 
CopyVirtualDisk_Task, 'duration_secs': 0.508822} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.139426] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd/ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1339.139646] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1339.140324] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2416a1ec-2eed-4967-85b3-b22c6e579cad {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.148837] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Waiting for the task: (returnval){ [ 1339.148837] env[62508]: value = "task-1775448" [ 1339.148837] env[62508]: _type = "Task" [ 1339.148837] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.154098] env[62508]: DEBUG nova.scheduler.client.report [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1339.165250] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': task-1775448, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.414112] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bfb4db8d-58fc-4860-b516-02856cae2a6c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.424768] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830159e8-e4bc-4856-a43f-e942eb58ff59 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.455127] env[62508]: DEBUG nova.compute.manager [req-43d10b8f-93d4-4a44-a691-f415784819a9 req-089e7132-e2c8-4177-bb9c-cd01515e02f5 service nova] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Detach interface failed, port_id=a4972a5c-7644-49db-a0bb-51bc62415b49, reason: Instance 575ea3dc-850d-4078-8678-41b3c40a4c27 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1339.560974] env[62508]: DEBUG nova.compute.manager [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1339.590420] env[62508]: INFO nova.compute.manager [-] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Took 2.14 seconds to deallocate network for instance. [ 1339.592924] env[62508]: DEBUG nova.virt.hardware [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1339.593983] env[62508]: DEBUG nova.virt.hardware [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1339.593983] env[62508]: DEBUG nova.virt.hardware [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1339.593983] env[62508]: DEBUG nova.virt.hardware [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Flavor pref 0:0:0 
{{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1339.593983] env[62508]: DEBUG nova.virt.hardware [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1339.593983] env[62508]: DEBUG nova.virt.hardware [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1339.594134] env[62508]: DEBUG nova.virt.hardware [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1339.594213] env[62508]: DEBUG nova.virt.hardware [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1339.594413] env[62508]: DEBUG nova.virt.hardware [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1339.594527] env[62508]: DEBUG nova.virt.hardware [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1339.594703] env[62508]: DEBUG nova.virt.hardware [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1339.597975] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6251af5e-9584-4b38-bd4b-33e61225a6c1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.614220] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66a47c2-8724-43d0-ba93-93b0dc7270cf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.661194] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': task-1775448, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06848} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.661522] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1339.662353] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b30d77fa-acf5-4651-a6fb-eae22d3e8476 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.668687] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.145s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.681416] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 34.084s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.693030] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd/ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1339.694266] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0dc26318-df22-476a-ac00-54690f2c3695 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.713400] env[62508]: INFO nova.scheduler.client.report [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Deleted allocations for instance 891fac54-2ec4-4d47-8535-a33bd9dfb804 [ 1339.718502] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Waiting for the task: (returnval){ [ 1339.718502] env[62508]: value = "task-1775449" [ 1339.718502] env[62508]: _type = "Task" [ 1339.718502] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.728330] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': task-1775449, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.106551] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1340.197831] env[62508]: INFO nova.compute.claims [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1340.228669] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ecdab486-f03b-4037-a991-827121575e8d tempest-ServersAaction247Test-1583780510 tempest-ServersAaction247Test-1583780510-project-member] Lock "891fac54-2ec4-4d47-8535-a33bd9dfb804" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.794s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1340.238423] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': task-1775449, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.707357] env[62508]: INFO nova.compute.resource_tracker [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Updating resource usage from migration 9db522bb-cefe-493d-ba92-c293c83e3634 [ 1341.527270] env[62508]: DEBUG nova.network.neutron [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Successfully updated port: d8028a3e-f50d-41fa-b065-a2babc831eec {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1341.529955] env[62508]: DEBUG nova.compute.manager [req-f199f39a-1d19-4e76-8367-61e6431d6385 req-0798fa48-101e-4aa6-9505-234d7529126e service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Received event network-vif-plugged-d8028a3e-f50d-41fa-b065-a2babc831eec {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1341.530270] env[62508]: DEBUG oslo_concurrency.lockutils [req-f199f39a-1d19-4e76-8367-61e6431d6385 req-0798fa48-101e-4aa6-9505-234d7529126e service nova] Acquiring lock "de69dbf0-86f1-4b05-a9db-8b9afaabe49c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1341.530716] env[62508]: DEBUG oslo_concurrency.lockutils [req-f199f39a-1d19-4e76-8367-61e6431d6385 req-0798fa48-101e-4aa6-9505-234d7529126e service nova] Lock "de69dbf0-86f1-4b05-a9db-8b9afaabe49c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.530716] env[62508]: DEBUG oslo_concurrency.lockutils [req-f199f39a-1d19-4e76-8367-61e6431d6385 req-0798fa48-101e-4aa6-9505-234d7529126e service nova] Lock "de69dbf0-86f1-4b05-a9db-8b9afaabe49c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.530716] env[62508]: DEBUG nova.compute.manager [req-f199f39a-1d19-4e76-8367-61e6431d6385 req-0798fa48-101e-4aa6-9505-234d7529126e service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] No waiting events found dispatching network-vif-plugged-d8028a3e-f50d-41fa-b065-a2babc831eec {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1341.530879] env[62508]: WARNING nova.compute.manager [req-f199f39a-1d19-4e76-8367-61e6431d6385 req-0798fa48-101e-4aa6-9505-234d7529126e service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Received unexpected event network-vif-plugged-d8028a3e-f50d-41fa-b065-a2babc831eec for instance with vm_state building and task_state spawning. [ 1341.531828] env[62508]: DEBUG nova.compute.manager [req-de49a77b-fd18-4c00-accd-ae6c79625985 req-48fb11d3-5620-4ae4-9670-f93a3a317735 service nova] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Received event network-changed-b7ef30a6-939d-4546-9597-db2b4cc755ae {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1341.531981] env[62508]: DEBUG nova.compute.manager [req-de49a77b-fd18-4c00-accd-ae6c79625985 req-48fb11d3-5620-4ae4-9670-f93a3a317735 service nova] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Refreshing instance network info cache due to event network-changed-b7ef30a6-939d-4546-9597-db2b4cc755ae. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1341.532186] env[62508]: DEBUG oslo_concurrency.lockutils [req-de49a77b-fd18-4c00-accd-ae6c79625985 req-48fb11d3-5620-4ae4-9670-f93a3a317735 service nova] Acquiring lock "refresh_cache-b911f25d-711b-411e-bb2d-2e59386ff2ea" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1341.532380] env[62508]: DEBUG oslo_concurrency.lockutils [req-de49a77b-fd18-4c00-accd-ae6c79625985 req-48fb11d3-5620-4ae4-9670-f93a3a317735 service nova] Acquired lock "refresh_cache-b911f25d-711b-411e-bb2d-2e59386ff2ea" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.532499] env[62508]: DEBUG nova.network.neutron [req-de49a77b-fd18-4c00-accd-ae6c79625985 req-48fb11d3-5620-4ae4-9670-f93a3a317735 service nova] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Refreshing network info cache for port b7ef30a6-939d-4546-9597-db2b4cc755ae {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1341.534648] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Acquiring lock "fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1341.535071] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Lock "fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.540268] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': task-1775449, 'name': ReconfigVM_Task, 'duration_secs': 0.77838} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.540751] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Reconfigured VM instance instance-00000012 to attach disk [datastore1] ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd/ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1341.541370] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fbf5a141-1c74-48d5-a330-f1132c93f5dd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.548489] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Waiting for the task: (returnval){ [ 1341.548489] env[62508]: value = "task-1775450" [ 1341.548489] env[62508]: _type = "Task" [ 1341.548489] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.566160] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': task-1775450, 'name': Rename_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.012805] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97ed4a0-6c72-494c-97ab-fede691b306e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.021322] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8915a9b-0990-4e3f-8dda-28523f7d4e37 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.055333] env[62508]: DEBUG oslo_concurrency.lockutils [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "refresh_cache-de69dbf0-86f1-4b05-a9db-8b9afaabe49c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1342.055333] env[62508]: DEBUG oslo_concurrency.lockutils [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired lock "refresh_cache-de69dbf0-86f1-4b05-a9db-8b9afaabe49c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.055333] env[62508]: DEBUG nova.network.neutron [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1342.063341] env[62508]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7beeda-e649-42a9-bc55-155ba380035d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.072835] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': task-1775450, 'name': Rename_Task, 'duration_secs': 0.29685} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.075297] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1342.075646] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3cf0193-b857-46aa-a7be-45b18dd7d47f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.078269] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab2c178d-abea-4888-8a69-b4b344094d21 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.096803] env[62508]: DEBUG nova.compute.provider_tree [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1342.100334] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Waiting for the task: (returnval){ [ 1342.100334] env[62508]: value = "task-1775451" [ 1342.100334] env[62508]: _type = "Task" [ 1342.100334] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.110223] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': task-1775451, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.603485] env[62508]: DEBUG nova.scheduler.client.report [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1342.620668] env[62508]: DEBUG nova.network.neutron [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1342.628596] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': task-1775451, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.707664] env[62508]: DEBUG nova.network.neutron [req-de49a77b-fd18-4c00-accd-ae6c79625985 req-48fb11d3-5620-4ae4-9670-f93a3a317735 service nova] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Updated VIF entry in instance network info cache for port b7ef30a6-939d-4546-9597-db2b4cc755ae. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1342.707664] env[62508]: DEBUG nova.network.neutron [req-de49a77b-fd18-4c00-accd-ae6c79625985 req-48fb11d3-5620-4ae4-9670-f93a3a317735 service nova] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Updating instance_info_cache with network_info: [{"id": "b7ef30a6-939d-4546-9597-db2b4cc755ae", "address": "fa:16:3e:d4:1b:46", "network": {"id": "2e10157c-8592-41c4-8540-9696923dc9ca", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1194878546-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "51842f3e9f83452789923afbafd40bc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f52a458-d157-48a3-b4e2-b8cc0779afe2", "external-id": "nsx-vlan-transportzone-403", "segmentation_id": 403, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7ef30a6-93", "ovs_interfaceid": "b7ef30a6-939d-4546-9597-db2b4cc755ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1342.806904] env[62508]: DEBUG nova.network.neutron [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Updating instance_info_cache with network_info: [{"id": "d8028a3e-f50d-41fa-b065-a2babc831eec", "address": "fa:16:3e:d0:df:77", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8028a3e-f5", "ovs_interfaceid": "d8028a3e-f50d-41fa-b065-a2babc831eec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1343.120669] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.439s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1343.120870] env[62508]: INFO nova.compute.manager [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Migrating [ 1343.121110] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1343.121681] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.122699] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': task-1775451, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.123432] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.106s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.124948] env[62508]: INFO nova.compute.claims [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1343.128312] env[62508]: INFO nova.compute.rpcapi [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 1343.129671] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1343.184241] env[62508]: DEBUG nova.compute.manager [req-4d46263c-ce52-4662-a5d9-a63b2043487d req-674c3786-2ccd-4a66-b742-a580fc290862 service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Received event network-changed-d8028a3e-f50d-41fa-b065-a2babc831eec {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1343.184456] env[62508]: DEBUG nova.compute.manager [req-4d46263c-ce52-4662-a5d9-a63b2043487d req-674c3786-2ccd-4a66-b742-a580fc290862 service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Refreshing instance network info cache due to event 
network-changed-d8028a3e-f50d-41fa-b065-a2babc831eec. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1343.184643] env[62508]: DEBUG oslo_concurrency.lockutils [req-4d46263c-ce52-4662-a5d9-a63b2043487d req-674c3786-2ccd-4a66-b742-a580fc290862 service nova] Acquiring lock "refresh_cache-de69dbf0-86f1-4b05-a9db-8b9afaabe49c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1343.209753] env[62508]: DEBUG oslo_concurrency.lockutils [req-de49a77b-fd18-4c00-accd-ae6c79625985 req-48fb11d3-5620-4ae4-9670-f93a3a317735 service nova] Releasing lock "refresh_cache-b911f25d-711b-411e-bb2d-2e59386ff2ea" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1343.309702] env[62508]: DEBUG oslo_concurrency.lockutils [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Releasing lock "refresh_cache-de69dbf0-86f1-4b05-a9db-8b9afaabe49c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1343.313145] env[62508]: DEBUG nova.compute.manager [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Instance network_info: |[{"id": "d8028a3e-f50d-41fa-b065-a2babc831eec", "address": "fa:16:3e:d0:df:77", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8028a3e-f5", "ovs_interfaceid": "d8028a3e-f50d-41fa-b065-a2babc831eec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1343.313611] env[62508]: DEBUG oslo_concurrency.lockutils [req-4d46263c-ce52-4662-a5d9-a63b2043487d req-674c3786-2ccd-4a66-b742-a580fc290862 service nova] Acquired lock "refresh_cache-de69dbf0-86f1-4b05-a9db-8b9afaabe49c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.313681] env[62508]: DEBUG nova.network.neutron [req-4d46263c-ce52-4662-a5d9-a63b2043487d req-674c3786-2ccd-4a66-b742-a580fc290862 service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Refreshing network info cache for port d8028a3e-f50d-41fa-b065-a2babc831eec {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1343.318022] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 
tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:df:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '304be4f7-4e36-4468-9ef4-e457341cef18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd8028a3e-f50d-41fa-b065-a2babc831eec', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1343.324117] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Creating folder: Project (74c45615efbb425fbec8400f6d225892). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1343.324990] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dac687b7-b1de-438d-b8b3-ef84e14bc76b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.338071] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Created folder: Project (74c45615efbb425fbec8400f6d225892) in parent group-v368536. [ 1343.338347] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Creating folder: Instances. Parent ref: group-v368589. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1343.338539] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aed614bf-52d0-4e0a-9d8d-6f1bfa7ed773 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.348482] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Created folder: Instances in parent group-v368589. [ 1343.348792] env[62508]: DEBUG oslo.service.loopingcall [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1343.348907] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1343.349124] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20e6593a-3c1a-4d34-97f5-a99a8925bd41 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.371619] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1343.371619] env[62508]: value = "task-1775454" [ 1343.371619] env[62508]: _type = "Task" [ 1343.371619] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.381250] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775454, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.620894] env[62508]: DEBUG oslo_vmware.api [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': task-1775451, 'name': PowerOnVM_Task, 'duration_secs': 1.174498} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.621418] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1343.621828] env[62508]: INFO nova.compute.manager [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Took 11.01 seconds to spawn the instance on the hypervisor. [ 1343.622446] env[62508]: DEBUG nova.compute.manager [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1343.624125] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a1b2a8-0001-45af-956d-f5d2228ed604 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.647831] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "refresh_cache-868cf942-f348-488d-b00a-af4c8b5efda5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1343.648109] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquired lock "refresh_cache-868cf942-f348-488d-b00a-af4c8b5efda5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.648359] env[62508]: DEBUG nova.network.neutron [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1343.792162] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.792162] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.888693] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775454, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.070524] env[62508]: DEBUG nova.network.neutron [req-4d46263c-ce52-4662-a5d9-a63b2043487d req-674c3786-2ccd-4a66-b742-a580fc290862 service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Updated VIF entry in instance network info cache for port d8028a3e-f50d-41fa-b065-a2babc831eec. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1344.070913] env[62508]: DEBUG nova.network.neutron [req-4d46263c-ce52-4662-a5d9-a63b2043487d req-674c3786-2ccd-4a66-b742-a580fc290862 service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Updating instance_info_cache with network_info: [{"id": "d8028a3e-f50d-41fa-b065-a2babc831eec", "address": "fa:16:3e:d0:df:77", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8028a3e-f5", "ovs_interfaceid": "d8028a3e-f50d-41fa-b065-a2babc831eec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1344.150109] env[62508]: INFO nova.compute.manager [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Took 41.38 seconds to build instance. [ 1344.383877] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775454, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.492733] env[62508]: DEBUG nova.network.neutron [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Updating instance_info_cache with network_info: [{"id": "f7f2eb4c-dab6-44e0-8f5e-7013ddd13683", "address": "fa:16:3e:1f:3b:14", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7f2eb4c-da", "ovs_interfaceid": "f7f2eb4c-dab6-44e0-8f5e-7013ddd13683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1344.574739] env[62508]: DEBUG oslo_concurrency.lockutils [req-4d46263c-ce52-4662-a5d9-a63b2043487d req-674c3786-2ccd-4a66-b742-a580fc290862 service nova] Releasing lock "refresh_cache-de69dbf0-86f1-4b05-a9db-8b9afaabe49c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1344.648544] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e978d0d4-66d2-44af-bf0b-8fc7f3a941b1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.655949] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d744f29-c5d4-47c5-a66b-dcf3c62fffce tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Lock "ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.576s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1344.658399] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2937f809-60b9-40e8-9791-6c3a2e66c9ad {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.694033] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b857512-845e-40ef-8af7-a847b7df7f68 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.702371] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db8020f-e566-415e-87f2-e4581f346277 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.720870] env[62508]: DEBUG nova.compute.provider_tree [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1344.888102] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775454, 'name': CreateVM_Task, 'duration_secs': 1.404201} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.888102] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1344.888102] env[62508]: DEBUG oslo_concurrency.lockutils [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.888102] env[62508]: DEBUG oslo_concurrency.lockutils [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.888102] env[62508]: DEBUG oslo_concurrency.lockutils [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1344.888102] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c8266d1-d5d5-4409-99cc-2c9a24657ecf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.891145] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1344.891145] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ed23ee-e239-6eb4-faea-6e6a834b03db" [ 1344.891145] env[62508]: _type = "Task" [ 1344.891145] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.900832] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ed23ee-e239-6eb4-faea-6e6a834b03db, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.997452] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Releasing lock "refresh_cache-868cf942-f348-488d-b00a-af4c8b5efda5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.162435] env[62508]: DEBUG nova.compute.manager [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1345.224779] env[62508]: DEBUG nova.scheduler.client.report [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1345.255945] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1345.255945] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.402434] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ed23ee-e239-6eb4-faea-6e6a834b03db, 'name': SearchDatastore_Task, 'duration_secs': 0.010358} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.402756] env[62508]: DEBUG oslo_concurrency.lockutils [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.402982] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1345.403260] env[62508]: DEBUG oslo_concurrency.lockutils [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.403411] env[62508]: DEBUG oslo_concurrency.lockutils [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.403593] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1345.403847] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7bc9698a-f44b-4c38-88f5-f0e3d91ec426 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.414418] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1345.414608] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1345.415351] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe7ffc33-3213-4b91-aca3-9038fff5967b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.425111] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1345.425111] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5229531d-5fb1-0a68-b18e-8078b21d200c" [ 1345.425111] env[62508]: _type = "Task" [ 1345.425111] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.437081] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5229531d-5fb1-0a68-b18e-8078b21d200c, 'name': SearchDatastore_Task, 'duration_secs': 0.00917} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.437991] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0be84b5-3052-4338-931b-468b5d633389 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.443220] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1345.443220] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52507a15-d392-3fb7-9f0f-423f21525076" [ 1345.443220] env[62508]: _type = "Task" [ 1345.443220] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.452020] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52507a15-d392-3fb7-9f0f-423f21525076, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.686022] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1345.730801] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.607s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.731528] env[62508]: DEBUG nova.compute.manager [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1345.734374] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.359s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.734610] env[62508]: DEBUG nova.objects.instance [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Lazy-loading 'resources' on Instance uuid d32a3a5d-17d0-4a79-b76a-371cdd170ee0 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1345.954583] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52507a15-d392-3fb7-9f0f-423f21525076, 'name': SearchDatastore_Task, 'duration_secs': 0.009564} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.954734] env[62508]: DEBUG oslo_concurrency.lockutils [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.954984] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] de69dbf0-86f1-4b05-a9db-8b9afaabe49c/de69dbf0-86f1-4b05-a9db-8b9afaabe49c.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1345.955415] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-63ab532b-086c-4233-a319-e9f0ad59da4c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.962714] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1345.962714] env[62508]: value = "task-1775455" [ 1345.962714] env[62508]: _type = "Task" [ 1345.962714] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.971157] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775455, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.047811] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "a10a4217-ae46-4f00-9ba1-cdf74f44ec7b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.048083] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "a10a4217-ae46-4f00-9ba1-cdf74f44ec7b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.240837] env[62508]: DEBUG nova.compute.utils [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1346.244334] env[62508]: DEBUG nova.compute.manager [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1346.244334] env[62508]: DEBUG nova.network.neutron [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1346.336796] env[62508]: DEBUG nova.policy [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '77d5728705c24b55a68eeecf33e90376', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eb642c0539ac42a595c2c9817f39a178', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1346.480020] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775455, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.511760] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b91a33-7f82-4a4e-bef8-194825dc7c82 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.533238] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Updating instance '868cf942-f348-488d-b00a-af4c8b5efda5' progress to 0 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1346.636966] env[62508]: DEBUG nova.compute.manager [req-e3decc53-07ef-4c23-bb91-7b0b609976e4 req-697613fd-b9d0-4f62-932f-7e7e0515cb95 service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Received event network-changed-57481a2e-e2c5-417e-abe4-7a7a5562ff08 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1346.636966] env[62508]: DEBUG nova.compute.manager [req-e3decc53-07ef-4c23-bb91-7b0b609976e4 req-697613fd-b9d0-4f62-932f-7e7e0515cb95 service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Refreshing instance network info cache due to event network-changed-57481a2e-e2c5-417e-abe4-7a7a5562ff08. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1346.636966] env[62508]: DEBUG oslo_concurrency.lockutils [req-e3decc53-07ef-4c23-bb91-7b0b609976e4 req-697613fd-b9d0-4f62-932f-7e7e0515cb95 service nova] Acquiring lock "refresh_cache-ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.636966] env[62508]: DEBUG oslo_concurrency.lockutils [req-e3decc53-07ef-4c23-bb91-7b0b609976e4 req-697613fd-b9d0-4f62-932f-7e7e0515cb95 service nova] Acquired lock "refresh_cache-ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.637106] env[62508]: DEBUG nova.network.neutron [req-e3decc53-07ef-4c23-bb91-7b0b609976e4 req-697613fd-b9d0-4f62-932f-7e7e0515cb95 service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Refreshing network info cache for port 57481a2e-e2c5-417e-abe4-7a7a5562ff08 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1346.746645] env[62508]: DEBUG nova.compute.manager [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1346.821042] env[62508]: DEBUG nova.network.neutron [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Successfully created port: 119423da-6eda-483b-b0aa-050fdbcd42e6 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1346.830186] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aad27f0-395d-4fff-b2eb-4b7b861c5d10 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.839520] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186c4037-6ed1-4d0f-9586-ca8d55e854fa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.875393] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-301b19fd-f9b4-461a-b0d6-63e23a0f0e19 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.883329] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61547ea-c5af-4450-b7a9-b21873190977 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.898105] env[62508]: DEBUG nova.compute.provider_tree [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1346.975513] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775455, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.575953} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.975784] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] de69dbf0-86f1-4b05-a9db-8b9afaabe49c/de69dbf0-86f1-4b05-a9db-8b9afaabe49c.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1346.975987] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1346.976583] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40d3ae00-8d23-4982-805a-6a5a12f39497 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.983412] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1346.983412] env[62508]: value = "task-1775456" [ 1346.983412] env[62508]: _type = "Task" [ 1346.983412] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.991447] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775456, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.043589] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1347.043589] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89c3416c-670e-4a6e-8c1a-dc13bc0ecb3f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.050797] env[62508]: DEBUG oslo_vmware.api [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1347.050797] env[62508]: value = "task-1775457" [ 1347.050797] env[62508]: _type = "Task" [ 1347.050797] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.060713] env[62508]: DEBUG oslo_vmware.api [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775457, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.405661] env[62508]: DEBUG nova.scheduler.client.report [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1347.483390] env[62508]: DEBUG nova.network.neutron [req-e3decc53-07ef-4c23-bb91-7b0b609976e4 req-697613fd-b9d0-4f62-932f-7e7e0515cb95 service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Updated VIF entry in instance network info cache for port 57481a2e-e2c5-417e-abe4-7a7a5562ff08. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1347.483717] env[62508]: DEBUG nova.network.neutron [req-e3decc53-07ef-4c23-bb91-7b0b609976e4 req-697613fd-b9d0-4f62-932f-7e7e0515cb95 service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Updating instance_info_cache with network_info: [{"id": "57481a2e-e2c5-417e-abe4-7a7a5562ff08", "address": "fa:16:3e:58:bc:a4", "network": {"id": "7f060c55-84d7-4d5f-bbf9-b1153e0421dd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-523269330-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dcb5fd2552e42188651162384519043", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57481a2e-e2", "ovs_interfaceid": "57481a2e-e2c5-417e-abe4-7a7a5562ff08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1347.496031] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775456, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069602} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.496299] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1347.497165] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415f5348-c887-47e4-807a-c0144223867f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.519953] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] de69dbf0-86f1-4b05-a9db-8b9afaabe49c/de69dbf0-86f1-4b05-a9db-8b9afaabe49c.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1347.520494] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c9e4576-147d-4a99-9dbe-a54c647ce763 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.539398] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1347.539398] env[62508]: value = "task-1775458" [ 1347.539398] env[62508]: _type = "Task" [ 1347.539398] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.547475] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775458, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.559966] env[62508]: DEBUG oslo_vmware.api [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775457, 'name': PowerOffVM_Task, 'duration_secs': 0.256842} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.560376] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1347.560659] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Updating instance '868cf942-f348-488d-b00a-af4c8b5efda5' progress to 17 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1347.759239] env[62508]: DEBUG nova.compute.manager [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1347.785448] env[62508]: DEBUG nova.virt.hardware [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1347.785703] env[62508]: DEBUG nova.virt.hardware [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1347.785862] env[62508]: DEBUG nova.virt.hardware [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1347.786052] env[62508]: DEBUG nova.virt.hardware [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1347.786201] env[62508]: DEBUG nova.virt.hardware [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Image pref 0:0:0 {{(pid=62508) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1347.786412] env[62508]: DEBUG nova.virt.hardware [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1347.786671] env[62508]: DEBUG nova.virt.hardware [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1347.786833] env[62508]: DEBUG nova.virt.hardware [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1347.787012] env[62508]: DEBUG nova.virt.hardware [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1347.787183] env[62508]: DEBUG nova.virt.hardware [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1347.787358] env[62508]: DEBUG nova.virt.hardware [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1347.788249] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e521b41f-8ab2-46e0-bb9a-b1af03636242 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.796521] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b161b0-b770-4d74-9045-355c6de82be8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.911238] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.177s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1347.913839] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.256s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.914097] env[62508]: DEBUG nova.objects.instance [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Lazy-loading 'resources' on Instance uuid 7339c22a-05c9-4ddd-93df-0326cbe96ca4 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1347.947744] env[62508]: INFO nova.scheduler.client.report [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Deleted allocations for instance d32a3a5d-17d0-4a79-b76a-371cdd170ee0 [ 1347.986606] env[62508]: DEBUG oslo_concurrency.lockutils [req-e3decc53-07ef-4c23-bb91-7b0b609976e4 req-697613fd-b9d0-4f62-932f-7e7e0515cb95 service nova] Releasing lock "refresh_cache-ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1348.050067] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775458, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.068021] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1348.068021] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1348.068021] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1348.068021] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1348.068256] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Image pref 0:0:0 
{{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1348.068256] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1348.068256] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1348.068256] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1348.068598] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1348.068893] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1348.069196] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1348.074246] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-123c6d52-522e-470a-8ae5-eb16c3989225 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.095171] env[62508]: DEBUG oslo_vmware.api [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1348.095171] env[62508]: value = "task-1775459" [ 1348.095171] env[62508]: _type = "Task" [ 1348.095171] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.106576] env[62508]: DEBUG oslo_vmware.api [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775459, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.459676] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25f4c355-4386-4917-a247-f18d48b96a8a tempest-ServerExternalEventsTest-1715241971 tempest-ServerExternalEventsTest-1715241971-project-member] Lock "d32a3a5d-17d0-4a79-b76a-371cdd170ee0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.161s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.555797] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775458, 'name': ReconfigVM_Task, 'duration_secs': 0.80026} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.556109] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Reconfigured VM instance instance-00000013 to attach disk [datastore1] de69dbf0-86f1-4b05-a9db-8b9afaabe49c/de69dbf0-86f1-4b05-a9db-8b9afaabe49c.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1348.556752] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-333b07e1-ec07-404a-93c6-6f46be376b64 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.563249] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1348.563249] env[62508]: value = "task-1775460" [ 1348.563249] env[62508]: _type = "Task" [ 1348.563249] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.573543] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775460, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.609357] env[62508]: DEBUG oslo_vmware.api [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775459, 'name': ReconfigVM_Task, 'duration_secs': 0.1855} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.609659] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Updating instance '868cf942-f348-488d-b00a-af4c8b5efda5' progress to 33 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1348.834641] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6398dc6b-4820-4101-848d-cb4ae19c927c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.842528] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c51870-92c8-41e3-bbe3-0449c17e614e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.872283] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff483a51-17a1-4ca1-bda3-07139c3127d6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.880064] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ee9ec6-255a-4fca-9164-173496b6ea23 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.893575] env[62508]: DEBUG nova.compute.provider_tree [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1348.896633] env[62508]: DEBUG nova.compute.manager [req-d0ae7496-dff4-4c1a-9e3a-9c70f120dcb8 req-e560b5b3-c8f4-4f68-8df1-dd6bf55232de service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Received event network-vif-plugged-119423da-6eda-483b-b0aa-050fdbcd42e6 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1348.896841] env[62508]: DEBUG oslo_concurrency.lockutils [req-d0ae7496-dff4-4c1a-9e3a-9c70f120dcb8 req-e560b5b3-c8f4-4f68-8df1-dd6bf55232de service nova] Acquiring lock "42eb98a9-e341-4a17-9d76-2a2c37efc1a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.897053] env[62508]: DEBUG oslo_concurrency.lockutils [req-d0ae7496-dff4-4c1a-9e3a-9c70f120dcb8 req-e560b5b3-c8f4-4f68-8df1-dd6bf55232de service nova] Lock "42eb98a9-e341-4a17-9d76-2a2c37efc1a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.897219] env[62508]: DEBUG oslo_concurrency.lockutils [req-d0ae7496-dff4-4c1a-9e3a-9c70f120dcb8 req-e560b5b3-c8f4-4f68-8df1-dd6bf55232de service nova] Lock "42eb98a9-e341-4a17-9d76-2a2c37efc1a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.897415] env[62508]: DEBUG nova.compute.manager [req-d0ae7496-dff4-4c1a-9e3a-9c70f120dcb8 req-e560b5b3-c8f4-4f68-8df1-dd6bf55232de service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] No waiting events found dispatching network-vif-plugged-119423da-6eda-483b-b0aa-050fdbcd42e6 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1348.897533] env[62508]: WARNING nova.compute.manager [req-d0ae7496-dff4-4c1a-9e3a-9c70f120dcb8 req-e560b5b3-c8f4-4f68-8df1-dd6bf55232de service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Received unexpected event network-vif-plugged-119423da-6eda-483b-b0aa-050fdbcd42e6 for instance with vm_state building and task_state spawning. [ 1348.898570] env[62508]: DEBUG nova.network.neutron [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Successfully updated port: 119423da-6eda-483b-b0aa-050fdbcd42e6 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1349.075350] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775460, 'name': Rename_Task, 'duration_secs': 0.134469} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.075742] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1349.076037] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-773d4cb9-e521-4d59-aaa5-a6e06a8680ff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.082180] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1349.082180] env[62508]: value = "task-1775461" [ 1349.082180] env[62508]: _type = "Task" [ 1349.082180] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.089599] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775461, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.118942] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1349.119210] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1349.119369] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1349.119555] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1349.119697] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1349.119963] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1349.120121] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1349.120302] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1349.120413] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Got 1 possible topologies {{(pid=62508) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1349.120591] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1349.120764] env[62508]: DEBUG nova.virt.hardware [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1349.126252] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Reconfiguring VM instance instance-00000006 to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1349.126570] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62398800-9b73-429e-befe-a8046968edc2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.144913] env[62508]: DEBUG oslo_vmware.api [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1349.144913] env[62508]: value = "task-1775462" [ 1349.144913] env[62508]: _type = "Task" [ 1349.144913] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.153359] env[62508]: DEBUG oslo_vmware.api [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775462, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.400601] env[62508]: DEBUG nova.scheduler.client.report [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1349.403934] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Acquiring lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1349.404103] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Acquired lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.404251] env[62508]: DEBUG nova.network.neutron [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1349.593178] env[62508]: DEBUG oslo_vmware.api [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775461, 'name': PowerOnVM_Task, 'duration_secs': 0.459661} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.593178] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1349.593178] env[62508]: INFO nova.compute.manager [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Took 10.03 seconds to spawn the instance on the hypervisor. 
[ 1349.593366] env[62508]: DEBUG nova.compute.manager [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1349.594228] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501486e7-f231-4ad4-a7e6-de3619ec8779 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.664076] env[62508]: DEBUG oslo_vmware.api [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775462, 'name': ReconfigVM_Task, 'duration_secs': 0.168788} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.664597] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Reconfigured VM instance instance-00000006 to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1349.665713] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d675b6-7eb8-4c25-8cc2-4b905fd5e12e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.698348] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] 868cf942-f348-488d-b00a-af4c8b5efda5/868cf942-f348-488d-b00a-af4c8b5efda5.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1349.699175] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6c4eafd-3cfc-4342-b535-cfb26f51cbdd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.721548] env[62508]: DEBUG oslo_vmware.api [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1349.721548] env[62508]: value = "task-1775463" [ 1349.721548] env[62508]: _type = "Task" [ 1349.721548] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.733985] env[62508]: DEBUG oslo_vmware.api [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775463, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.913868] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.999s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1349.916375] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 36.447s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1349.917178] env[62508]: DEBUG nova.objects.instance [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62508) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1349.946250] env[62508]: INFO nova.scheduler.client.report [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Deleted allocations for instance 7339c22a-05c9-4ddd-93df-0326cbe96ca4 [ 1349.970650] env[62508]: DEBUG nova.network.neutron [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1350.116901] env[62508]: INFO nova.compute.manager [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Took 45.43 seconds to build instance. 
[ 1350.153319] env[62508]: DEBUG nova.network.neutron [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Updating instance_info_cache with network_info: [{"id": "119423da-6eda-483b-b0aa-050fdbcd42e6", "address": "fa:16:3e:54:21:2d", "network": {"id": "9dc77876-f51a-4ae3-b5af-bbff5188cb44", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1439036833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "eb642c0539ac42a595c2c9817f39a178", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119423da-6e", "ovs_interfaceid": "119423da-6eda-483b-b0aa-050fdbcd42e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.232318] env[62508]: DEBUG oslo_vmware.api [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775463, 'name': ReconfigVM_Task, 'duration_secs': 0.263011} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.233191] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Reconfigured VM instance instance-00000006 to attach disk [datastore1] 868cf942-f348-488d-b00a-af4c8b5efda5/868cf942-f348-488d-b00a-af4c8b5efda5.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1350.233454] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Updating instance '868cf942-f348-488d-b00a-af4c8b5efda5' progress to 50 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1350.454408] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4e141227-29d4-41e8-a9ca-7d1db7cba5d2 tempest-ServerDiagnosticsNegativeTest-424824256 tempest-ServerDiagnosticsNegativeTest-424824256-project-member] Lock "7339c22a-05c9-4ddd-93df-0326cbe96ca4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.750s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1350.620033] env[62508]: DEBUG oslo_concurrency.lockutils [None req-209c4e5d-2c96-4a5b-87ba-f6821d9a63a5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "de69dbf0-86f1-4b05-a9db-8b9afaabe49c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.653s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1350.657675] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Releasing lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1350.657894] env[62508]: DEBUG nova.compute.manager [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Instance network_info: |[{"id": "119423da-6eda-483b-b0aa-050fdbcd42e6", "address": "fa:16:3e:54:21:2d", "network": {"id": "9dc77876-f51a-4ae3-b5af-bbff5188cb44", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1439036833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "eb642c0539ac42a595c2c9817f39a178", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap119423da-6e", "ovs_interfaceid": "119423da-6eda-483b-b0aa-050fdbcd42e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1350.658726] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:21:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73f6629b-7f80-4a5b-8f15-c7a1635b3c33', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '119423da-6eda-483b-b0aa-050fdbcd42e6', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1350.666773] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Creating folder: Project (eb642c0539ac42a595c2c9817f39a178). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1350.667140] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5faa3414-8f4e-44b9-879b-3735a076190a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.679625] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Created folder: Project (eb642c0539ac42a595c2c9817f39a178) in parent group-v368536. [ 1350.679625] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Creating folder: Instances. Parent ref: group-v368592. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1350.679625] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-13c623a8-fdad-4874-9d1e-7badba5b9032 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.692249] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Created folder: Instances in parent group-v368592. [ 1350.692249] env[62508]: DEBUG oslo.service.loopingcall [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1350.692339] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1350.693302] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-083deb9d-6ae2-4c3a-b0c6-852fdbe53215 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.712621] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1350.712621] env[62508]: value = "task-1775466" [ 1350.712621] env[62508]: _type = "Task" [ 1350.712621] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.720826] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775466, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.743825] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06679e21-2559-44f2-8751-71209c9332a7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.766029] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510564ad-b470-45b1-be82-08d95fac788b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.784480] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Updating instance '868cf942-f348-488d-b00a-af4c8b5efda5' progress to 67 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1350.930538] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a7b0bd9-542f-4baa-8079-e42d7b064ae0 tempest-ServersAdmin275Test-563822356 tempest-ServersAdmin275Test-563822356-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1350.932018] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.019s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1350.935943] env[62508]: INFO nova.compute.claims [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1350.940655] env[62508]: DEBUG nova.compute.manager [req-4936ee75-dc13-4443-be05-30927c749a22 req-5aeafe83-dded-49c0-bbea-1eba82fb266f service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Received event 
network-changed-119423da-6eda-483b-b0aa-050fdbcd42e6 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1350.940836] env[62508]: DEBUG nova.compute.manager [req-4936ee75-dc13-4443-be05-30927c749a22 req-5aeafe83-dded-49c0-bbea-1eba82fb266f service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Refreshing instance network info cache due to event network-changed-119423da-6eda-483b-b0aa-050fdbcd42e6. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1350.941064] env[62508]: DEBUG oslo_concurrency.lockutils [req-4936ee75-dc13-4443-be05-30927c749a22 req-5aeafe83-dded-49c0-bbea-1eba82fb266f service nova] Acquiring lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.941261] env[62508]: DEBUG oslo_concurrency.lockutils [req-4936ee75-dc13-4443-be05-30927c749a22 req-5aeafe83-dded-49c0-bbea-1eba82fb266f service nova] Acquired lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.941389] env[62508]: DEBUG nova.network.neutron [req-4936ee75-dc13-4443-be05-30927c749a22 req-5aeafe83-dded-49c0-bbea-1eba82fb266f service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Refreshing network info cache for port 119423da-6eda-483b-b0aa-050fdbcd42e6 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1351.015942] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Acquiring lock "ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.015942] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Lock "ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.015942] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Acquiring lock "ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.016185] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Lock "ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.016232] env[62508]: DEBUG 
oslo_concurrency.lockutils [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Lock "ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.018746] env[62508]: INFO nova.compute.manager [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Terminating instance [ 1351.024247] env[62508]: DEBUG nova.compute.manager [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1351.024517] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1351.025758] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9b8361-3f97-4d9b-b775-2dcaf5b42048 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.035031] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1351.035031] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-19b00b03-c5c6-4d14-9bd0-cf9e108a03db {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.044039] env[62508]: DEBUG oslo_vmware.api [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Waiting for the task: (returnval){ [ 1351.044039] env[62508]: value = "task-1775467" [ 1351.044039] env[62508]: _type = "Task" [ 1351.044039] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.052871] env[62508]: DEBUG oslo_vmware.api [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': task-1775467, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.124469] env[62508]: DEBUG nova.compute.manager [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1351.223603] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775466, 'name': CreateVM_Task, 'duration_secs': 0.361925} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.223804] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1351.224784] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1351.224784] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.225016] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1351.225290] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de53b2f0-7fa2-45d6-b970-4f3fdc10aa9c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.230173] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Waiting for the task: (returnval){ [ 1351.230173] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527293eb-5d8e-201a-7ee8-3249e73c35e7" [ 1351.230173] env[62508]: _type = "Task" [ 1351.230173] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.239434] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527293eb-5d8e-201a-7ee8-3249e73c35e7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.368944] env[62508]: DEBUG nova.network.neutron [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Port f7f2eb4c-dab6-44e0-8f5e-7013ddd13683 binding to destination host cpu-1 is already ACTIVE {{(pid=62508) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1351.554492] env[62508]: DEBUG oslo_vmware.api [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': task-1775467, 'name': PowerOffVM_Task, 'duration_secs': 0.320949} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.554904] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1351.555185] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1351.555532] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2869da0a-d205-44ad-9322-523ddbd8bad3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.645234] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1351.645446] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1351.645653] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Deleting the datastore file [datastore1] ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1351.645869] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a2676b81-6aa5-454b-92f1-40e4a4767c77 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.653264] env[62508]: DEBUG oslo_vmware.api [None 
req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Waiting for the task: (returnval){ [ 1351.653264] env[62508]: value = "task-1775469" [ 1351.653264] env[62508]: _type = "Task" [ 1351.653264] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.663517] env[62508]: DEBUG oslo_concurrency.lockutils [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.668766] env[62508]: DEBUG oslo_vmware.api [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': task-1775469, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.740800] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527293eb-5d8e-201a-7ee8-3249e73c35e7, 'name': SearchDatastore_Task, 'duration_secs': 0.010577} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.741240] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1351.741487] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1351.741777] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1351.741964] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.742221] env[62508]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1351.742512] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4387f3f7-05df-4a98-b6e3-a447435d0af0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.750743] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1351.750914] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1351.751669] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a70dcf0d-898c-4c03-92e2-f680479376c2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.754502] env[62508]: DEBUG nova.network.neutron [req-4936ee75-dc13-4443-be05-30927c749a22 req-5aeafe83-dded-49c0-bbea-1eba82fb266f service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Updated VIF entry in instance network info cache for port 119423da-6eda-483b-b0aa-050fdbcd42e6. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1351.754828] env[62508]: DEBUG nova.network.neutron [req-4936ee75-dc13-4443-be05-30927c749a22 req-5aeafe83-dded-49c0-bbea-1eba82fb266f service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Updating instance_info_cache with network_info: [{"id": "119423da-6eda-483b-b0aa-050fdbcd42e6", "address": "fa:16:3e:54:21:2d", "network": {"id": "9dc77876-f51a-4ae3-b5af-bbff5188cb44", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1439036833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "eb642c0539ac42a595c2c9817f39a178", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119423da-6e", "ovs_interfaceid": "119423da-6eda-483b-b0aa-050fdbcd42e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1351.759157] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Waiting for the task: (returnval){ [ 1351.759157] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52723675-8998-39b1-ebf7-08ec2ac9e271" [ 1351.759157] env[62508]: _type = "Task" [ 1351.759157] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.766616] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52723675-8998-39b1-ebf7-08ec2ac9e271, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.168797] env[62508]: DEBUG oslo_vmware.api [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Task: {'id': task-1775469, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155421} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.169716] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1352.170544] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1352.170820] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1352.171140] env[62508]: INFO nova.compute.manager [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1352.171476] env[62508]: DEBUG oslo.service.loopingcall [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1352.171719] env[62508]: DEBUG nova.compute.manager [-] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1352.171893] env[62508]: DEBUG nova.network.neutron [-] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1352.258888] env[62508]: DEBUG oslo_concurrency.lockutils [req-4936ee75-dc13-4443-be05-30927c749a22 req-5aeafe83-dded-49c0-bbea-1eba82fb266f service nova] Releasing lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1352.259230] env[62508]: DEBUG nova.compute.manager [req-4936ee75-dc13-4443-be05-30927c749a22 req-5aeafe83-dded-49c0-bbea-1eba82fb266f service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Received event network-changed-57481a2e-e2c5-417e-abe4-7a7a5562ff08 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1352.259421] env[62508]: DEBUG nova.compute.manager [req-4936ee75-dc13-4443-be05-30927c749a22 req-5aeafe83-dded-49c0-bbea-1eba82fb266f service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Refreshing instance network info cache due to event network-changed-57481a2e-e2c5-417e-abe4-7a7a5562ff08. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1352.259647] env[62508]: DEBUG oslo_concurrency.lockutils [req-4936ee75-dc13-4443-be05-30927c749a22 req-5aeafe83-dded-49c0-bbea-1eba82fb266f service nova] Acquiring lock "refresh_cache-ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1352.259786] env[62508]: DEBUG oslo_concurrency.lockutils [req-4936ee75-dc13-4443-be05-30927c749a22 req-5aeafe83-dded-49c0-bbea-1eba82fb266f service nova] Acquired lock "refresh_cache-ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.259957] env[62508]: DEBUG nova.network.neutron [req-4936ee75-dc13-4443-be05-30927c749a22 req-5aeafe83-dded-49c0-bbea-1eba82fb266f service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Refreshing network info cache for port 57481a2e-e2c5-417e-abe4-7a7a5562ff08 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1352.275704] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52723675-8998-39b1-ebf7-08ec2ac9e271, 'name': SearchDatastore_Task, 'duration_secs': 0.009014} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.277351] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6db9c86-95b0-4601-957d-a82c7ef41fad {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.286697] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Waiting for the task: (returnval){ [ 1352.286697] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f1f5ef-b466-11aa-c833-31ef7e95b0c2" [ 1352.286697] env[62508]: _type = "Task" [ 1352.286697] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.300600] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f1f5ef-b466-11aa-c833-31ef7e95b0c2, 'name': SearchDatastore_Task, 'duration_secs': 0.0103} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.303938] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1352.303938] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 42eb98a9-e341-4a17-9d76-2a2c37efc1a1/42eb98a9-e341-4a17-9d76-2a2c37efc1a1.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1352.304325] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-363c46a4-481d-460c-ac80-dfec2ce06b07 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.311803] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Waiting for the task: (returnval){ [ 1352.311803] env[62508]: value = "task-1775470" [ 1352.311803] env[62508]: _type = "Task" [ 1352.311803] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.320517] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775470, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.403038] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "868cf942-f348-488d-b00a-af4c8b5efda5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1352.403038] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "868cf942-f348-488d-b00a-af4c8b5efda5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1352.403038] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "868cf942-f348-488d-b00a-af4c8b5efda5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1352.531021] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d846f243-8d36-42ad-baa4-74869ac407b1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.540150] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae2a670-4d7f-4ede-ae5e-56e0f89abb15 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.578209] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0eb90c-92b2-4daf-a576-758f7556a595 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.587441] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b504de8d-194b-4a98-9b4c-931903babd69 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.605226] env[62508]: DEBUG nova.compute.provider_tree [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1352.823114] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775470, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483635} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.823618] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 42eb98a9-e341-4a17-9d76-2a2c37efc1a1/42eb98a9-e341-4a17-9d76-2a2c37efc1a1.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1352.823963] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1352.824316] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-db17fbe5-8afd-4227-a315-02dd103a3fe3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.832065] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Waiting for the task: (returnval){ [ 1352.832065] env[62508]: value = "task-1775471" [ 1352.832065] env[62508]: _type = "Task" [ 1352.832065] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.839595] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775471, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.975162] env[62508]: DEBUG nova.compute.manager [req-a09770b9-029c-4482-bd34-2248d492bc13 req-22df6c2b-8f04-49bb-a5f9-b47ba3eb0d87 service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Received event network-changed-d8028a3e-f50d-41fa-b065-a2babc831eec {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1352.975162] env[62508]: DEBUG nova.compute.manager [req-a09770b9-029c-4482-bd34-2248d492bc13 req-22df6c2b-8f04-49bb-a5f9-b47ba3eb0d87 service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Refreshing instance network info cache due to event network-changed-d8028a3e-f50d-41fa-b065-a2babc831eec. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1352.975162] env[62508]: DEBUG oslo_concurrency.lockutils [req-a09770b9-029c-4482-bd34-2248d492bc13 req-22df6c2b-8f04-49bb-a5f9-b47ba3eb0d87 service nova] Acquiring lock "refresh_cache-de69dbf0-86f1-4b05-a9db-8b9afaabe49c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1352.975162] env[62508]: DEBUG oslo_concurrency.lockutils [req-a09770b9-029c-4482-bd34-2248d492bc13 req-22df6c2b-8f04-49bb-a5f9-b47ba3eb0d87 service nova] Acquired lock "refresh_cache-de69dbf0-86f1-4b05-a9db-8b9afaabe49c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.975892] env[62508]: DEBUG nova.network.neutron [req-a09770b9-029c-4482-bd34-2248d492bc13 req-22df6c2b-8f04-49bb-a5f9-b47ba3eb0d87 service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Refreshing network info cache for port d8028a3e-f50d-41fa-b065-a2babc831eec {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1353.108388] env[62508]: DEBUG nova.scheduler.client.report [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1353.315652] env[62508]: DEBUG nova.network.neutron [req-4936ee75-dc13-4443-be05-30927c749a22 req-5aeafe83-dded-49c0-bbea-1eba82fb266f service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Updated VIF entry in instance network info cache for port 57481a2e-e2c5-417e-abe4-7a7a5562ff08. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1353.315652] env[62508]: DEBUG nova.network.neutron [req-4936ee75-dc13-4443-be05-30927c749a22 req-5aeafe83-dded-49c0-bbea-1eba82fb266f service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Updating instance_info_cache with network_info: [{"id": "57481a2e-e2c5-417e-abe4-7a7a5562ff08", "address": "fa:16:3e:58:bc:a4", "network": {"id": "7f060c55-84d7-4d5f-bbf9-b1153e0421dd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-523269330-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dcb5fd2552e42188651162384519043", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57481a2e-e2", "ovs_interfaceid": "57481a2e-e2c5-417e-abe4-7a7a5562ff08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1353.344576] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775471, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063946} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.345171] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1353.346098] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca89db3-e16d-4de5-8349-d4edf937f7a0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.365073] env[62508]: DEBUG nova.network.neutron [-] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1353.377966] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 42eb98a9-e341-4a17-9d76-2a2c37efc1a1/42eb98a9-e341-4a17-9d76-2a2c37efc1a1.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1353.378773] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c909ae5d-0c58-4b12-89e6-3e9f61056850 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.399877] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Waiting for the task: (returnval){ [ 1353.399877] env[62508]: value = "task-1775472" [ 1353.399877] env[62508]: _type = "Task" [ 1353.399877] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.415252] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775472, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.486114] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "refresh_cache-868cf942-f348-488d-b00a-af4c8b5efda5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1353.486114] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquired lock "refresh_cache-868cf942-f348-488d-b00a-af4c8b5efda5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.486114] env[62508]: DEBUG nova.network.neutron [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1353.615873] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.684s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.618632] env[62508]: DEBUG nova.compute.manager [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1353.619586] env[62508]: DEBUG oslo_concurrency.lockutils [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.414s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.619952] env[62508]: DEBUG nova.objects.instance [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Lazy-loading 'resources' on Instance uuid 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1353.819070] env[62508]: DEBUG oslo_concurrency.lockutils [req-4936ee75-dc13-4443-be05-30927c749a22 req-5aeafe83-dded-49c0-bbea-1eba82fb266f service nova] Releasing lock "refresh_cache-ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1353.879732] env[62508]: INFO nova.compute.manager [-] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Took 1.71 seconds to deallocate network for instance. 
[ 1353.913179] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775472, 'name': ReconfigVM_Task, 'duration_secs': 0.278193} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.913764] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 42eb98a9-e341-4a17-9d76-2a2c37efc1a1/42eb98a9-e341-4a17-9d76-2a2c37efc1a1.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1353.914430] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-daf2e6f4-cfd4-44f8-9411-f6a36a5e468b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.921255] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Waiting for the task: (returnval){ [ 1353.921255] env[62508]: value = "task-1775473" [ 1353.921255] env[62508]: _type = "Task" [ 1353.921255] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.932926] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775473, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.937831] env[62508]: DEBUG nova.network.neutron [req-a09770b9-029c-4482-bd34-2248d492bc13 req-22df6c2b-8f04-49bb-a5f9-b47ba3eb0d87 service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Updated VIF entry in instance network info cache for port d8028a3e-f50d-41fa-b065-a2babc831eec. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1353.938178] env[62508]: DEBUG nova.network.neutron [req-a09770b9-029c-4482-bd34-2248d492bc13 req-22df6c2b-8f04-49bb-a5f9-b47ba3eb0d87 service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Updating instance_info_cache with network_info: [{"id": "d8028a3e-f50d-41fa-b065-a2babc831eec", "address": "fa:16:3e:d0:df:77", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8028a3e-f5", "ovs_interfaceid": "d8028a3e-f50d-41fa-b065-a2babc831eec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.130360] env[62508]: DEBUG nova.compute.utils [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1354.132379] env[62508]: DEBUG nova.compute.manager [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1354.133061] env[62508]: DEBUG nova.network.neutron [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1354.208998] env[62508]: DEBUG nova.policy [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b84719d9c5842a5a144d91691b5fc12', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d188021b90b4a7cb04521e090d0c1c8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1354.283052] env[62508]: DEBUG nova.network.neutron [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Updating instance_info_cache with network_info: [{"id": "f7f2eb4c-dab6-44e0-8f5e-7013ddd13683", "address": "fa:16:3e:1f:3b:14", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7f2eb4c-da", "ovs_interfaceid": "f7f2eb4c-dab6-44e0-8f5e-7013ddd13683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.387661] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1354.438905] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775473, 'name': Rename_Task, 'duration_secs': 0.134833} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.439135] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1354.439997] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5477f269-42d3-4f17-92e0-225f1ec7c5db {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.441291] env[62508]: DEBUG oslo_concurrency.lockutils [req-a09770b9-029c-4482-bd34-2248d492bc13 req-22df6c2b-8f04-49bb-a5f9-b47ba3eb0d87 service nova] Releasing lock "refresh_cache-de69dbf0-86f1-4b05-a9db-8b9afaabe49c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1354.441750] env[62508]: DEBUG nova.compute.manager [req-a09770b9-029c-4482-bd34-2248d492bc13 req-22df6c2b-8f04-49bb-a5f9-b47ba3eb0d87 service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Received event network-vif-deleted-57481a2e-e2c5-417e-abe4-7a7a5562ff08 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1354.441806] env[62508]: INFO nova.compute.manager [req-a09770b9-029c-4482-bd34-2248d492bc13 req-22df6c2b-8f04-49bb-a5f9-b47ba3eb0d87 service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Neutron deleted interface 57481a2e-e2c5-417e-abe4-7a7a5562ff08; detaching it from the instance and deleting it from the info cache [ 1354.441949] env[62508]: DEBUG nova.network.neutron [req-a09770b9-029c-4482-bd34-2248d492bc13 req-22df6c2b-8f04-49bb-a5f9-b47ba3eb0d87 service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.450026] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Waiting for the task: (returnval){ [ 1354.450026] env[62508]: value = "task-1775474" [ 1354.450026] env[62508]: _type = "Task" [ 1354.450026] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.460189] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775474, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.635251] env[62508]: DEBUG nova.compute.manager [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1354.642048] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6d58cc-3260-4a87-ac45-ec62c59ed20b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.649976] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7861952-b533-4c4e-a2d5-dd17ee12f3ce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.684911] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bcd870c-325f-43d3-b1b2-ed44eaa067f4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.694515] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eeb33e6-691f-450f-a904-320ff0ef2518 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.713955] env[62508]: DEBUG nova.compute.provider_tree [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1354.736122] env[62508]: DEBUG nova.network.neutron [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Successfully created port: c1117e3d-5c3e-4513-832b-12ce45699cab {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1354.784483] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Releasing lock "refresh_cache-868cf942-f348-488d-b00a-af4c8b5efda5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1354.958406] env[62508]: DEBUG oslo_vmware.api [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775474, 'name': PowerOnVM_Task, 'duration_secs': 0.448782} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.958618] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6bcce7d1-3291-46d3-abad-20b6c0179980 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.960770] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1354.960770] env[62508]: INFO nova.compute.manager [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Took 7.20 seconds to spawn the instance on the hypervisor. [ 1354.960880] env[62508]: DEBUG nova.compute.manager [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1354.961673] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1daa642-4f91-44c0-b26a-8e3d97c5a9cc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.975309] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d514f81-f70b-49cf-921a-d5fd06c091cd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.004584] env[62508]: DEBUG nova.compute.manager [req-a09770b9-029c-4482-bd34-2248d492bc13 req-22df6c2b-8f04-49bb-a5f9-b47ba3eb0d87 service nova] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Detach interface failed, port_id=57481a2e-e2c5-417e-abe4-7a7a5562ff08, reason: Instance ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd could not be found. 
{{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1355.217769] env[62508]: DEBUG nova.scheduler.client.report [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1355.315518] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19759dc-fefd-4b4e-bd8e-871f4dbf64d0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.341244] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad07a38-0690-4254-8b7d-985e35ff6193 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.349703] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Updating instance '868cf942-f348-488d-b00a-af4c8b5efda5' progress to 83 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1355.384655] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "db74146d-abc3-4d48-be1b-6ad471794dbf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1355.384905] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "db74146d-abc3-4d48-be1b-6ad471794dbf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.482276] env[62508]: INFO nova.compute.manager [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Took 48.49 seconds to build instance. [ 1355.648380] env[62508]: DEBUG nova.compute.manager [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1355.675019] env[62508]: DEBUG nova.virt.hardware [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1355.675019] env[62508]: DEBUG nova.virt.hardware [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1355.675019] env[62508]: DEBUG nova.virt.hardware [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1355.675317] env[62508]: DEBUG nova.virt.hardware [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1355.675317] env[62508]: DEBUG nova.virt.hardware [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1355.675457] env[62508]: DEBUG nova.virt.hardware [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1355.675804] env[62508]: DEBUG nova.virt.hardware [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1355.676136] env[62508]: DEBUG nova.virt.hardware [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1355.676453] env[62508]: DEBUG nova.virt.hardware [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1355.676759] env[62508]: DEBUG nova.virt.hardware [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1355.677084] env[62508]: DEBUG nova.virt.hardware [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1355.678114] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c69739-a3da-4eaa-be9e-f00c4790f4d2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.686493] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b1113e-20bc-4f70-abeb-a56861ca0887 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.728022] env[62508]: DEBUG oslo_concurrency.lockutils [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.106s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.728022] env[62508]: DEBUG oslo_concurrency.lockutils [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.401s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.730602] env[62508]: INFO nova.compute.claims [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1355.767255] env[62508]: INFO nova.scheduler.client.report [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Deleted allocations for instance 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b [ 1355.860150] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1355.860910] env[62508]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8fc914f4-edd0-47fa-8f81-d2d655ae97a0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.873817] env[62508]: DEBUG oslo_vmware.api [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1355.873817] env[62508]: value = "task-1775475" [ 1355.873817] env[62508]: _type = "Task" [ 1355.873817] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.884116] env[62508]: DEBUG oslo_vmware.api [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775475, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.986603] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a56bc859-cfa2-456c-95ab-c3868ca3af75 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Lock "42eb98a9-e341-4a17-9d76-2a2c37efc1a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.366s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1356.106560] env[62508]: INFO nova.compute.manager [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Rescuing [ 1356.106560] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Acquiring lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1356.106560] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Acquired lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1356.106560] env[62508]: DEBUG nova.network.neutron [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1356.277889] env[62508]: DEBUG oslo_concurrency.lockutils [None req-790b0d75-1038-4fbc-aeee-4a59c3e1ac5b tempest-ServersAdmin275Test-925415751 tempest-ServersAdmin275Test-925415751-project-member] Lock "96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.970s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1356.386164] env[62508]: DEBUG oslo_vmware.api [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 
tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775475, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.489461] env[62508]: DEBUG nova.compute.manager [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1356.728945] env[62508]: DEBUG nova.compute.manager [req-ad4806db-67d8-4309-9411-dfca6e058f34 req-15330d7f-582e-4871-bf2b-3ea190f45ff5 service nova] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Received event network-vif-plugged-c1117e3d-5c3e-4513-832b-12ce45699cab {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1356.729203] env[62508]: DEBUG oslo_concurrency.lockutils [req-ad4806db-67d8-4309-9411-dfca6e058f34 req-15330d7f-582e-4871-bf2b-3ea190f45ff5 service nova] Acquiring lock "e652e59f-9432-41cf-b4a5-0f5cf649b24e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.729416] env[62508]: DEBUG oslo_concurrency.lockutils [req-ad4806db-67d8-4309-9411-dfca6e058f34 req-15330d7f-582e-4871-bf2b-3ea190f45ff5 service nova] Lock "e652e59f-9432-41cf-b4a5-0f5cf649b24e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1356.729587] env[62508]: DEBUG oslo_concurrency.lockutils [req-ad4806db-67d8-4309-9411-dfca6e058f34 req-15330d7f-582e-4871-bf2b-3ea190f45ff5 service nova] Lock "e652e59f-9432-41cf-b4a5-0f5cf649b24e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1356.729752] env[62508]: DEBUG nova.compute.manager [req-ad4806db-67d8-4309-9411-dfca6e058f34 req-15330d7f-582e-4871-bf2b-3ea190f45ff5 service nova] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] No waiting events found dispatching network-vif-plugged-c1117e3d-5c3e-4513-832b-12ce45699cab {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1356.729925] env[62508]: WARNING nova.compute.manager [req-ad4806db-67d8-4309-9411-dfca6e058f34 req-15330d7f-582e-4871-bf2b-3ea190f45ff5 service nova] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Received unexpected event network-vif-plugged-c1117e3d-5c3e-4513-832b-12ce45699cab for instance with vm_state building and task_state spawning. 
[ 1356.858151] env[62508]: DEBUG nova.network.neutron [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Successfully updated port: c1117e3d-5c3e-4513-832b-12ce45699cab {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1356.888972] env[62508]: DEBUG oslo_vmware.api [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775475, 'name': PowerOnVM_Task, 'duration_secs': 0.584554} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.893120] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1356.893120] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cbf591e2-cf6d-47ec-a55a-17b0e8b3949c tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Updating instance '868cf942-f348-488d-b00a-af4c8b5efda5' progress to 100 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1357.000689] env[62508]: DEBUG nova.network.neutron [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Updating instance_info_cache with network_info: [{"id": "119423da-6eda-483b-b0aa-050fdbcd42e6", "address": "fa:16:3e:54:21:2d", "network": {"id": "9dc77876-f51a-4ae3-b5af-bbff5188cb44", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1439036833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "eb642c0539ac42a595c2c9817f39a178", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119423da-6e", "ovs_interfaceid": "119423da-6eda-483b-b0aa-050fdbcd42e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1357.021984] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.248659] 
env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25d51c7-4dda-4a5b-864b-637e48ec0d50 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.261265] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1e6095-193f-4492-a270-6227a6f82e70 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.303499] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-befb715c-09b6-4001-a983-5535427ed99a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.312801] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a0d91c-c359-4517-9b24-1bb52fd7b41a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.329074] env[62508]: DEBUG nova.compute.provider_tree [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1357.363510] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquiring lock "refresh_cache-e652e59f-9432-41cf-b4a5-0f5cf649b24e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1357.363510] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquired lock "refresh_cache-e652e59f-9432-41cf-b4a5-0f5cf649b24e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1357.363510] env[62508]: DEBUG nova.network.neutron [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1357.505103] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Releasing lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1357.835195] env[62508]: DEBUG nova.scheduler.client.report [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1357.910357] env[62508]: DEBUG nova.network.neutron [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1358.043193] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1358.043551] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-954a62f2-1433-4d31-bf5c-2588b482535f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.054793] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Waiting for the task: (returnval){ [ 1358.054793] env[62508]: value = "task-1775476" [ 1358.054793] env[62508]: _type = "Task" [ 1358.054793] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.072905] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775476, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.214107] env[62508]: DEBUG nova.network.neutron [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Updating instance_info_cache with network_info: [{"id": "c1117e3d-5c3e-4513-832b-12ce45699cab", "address": "fa:16:3e:bf:62:e4", "network": {"id": "93471a4c-8043-41bc-9a6c-49116ad50d1a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2104534686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d188021b90b4a7cb04521e090d0c1c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1117e3d-5c", "ovs_interfaceid": "c1117e3d-5c3e-4513-832b-12ce45699cab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1358.342857] env[62508]: DEBUG oslo_concurrency.lockutils [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.615s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1358.343429] env[62508]: DEBUG nova.compute.manager [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1358.348016] env[62508]: DEBUG oslo_concurrency.lockutils [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.121s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1358.348252] env[62508]: DEBUG nova.objects.instance [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Lazy-loading 'resources' on Instance uuid fa00f4fe-3bb2-4e17-be22-8a1fda502f65 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1358.577372] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775476, 'name': PowerOffVM_Task, 'duration_secs': 0.18732} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.577684] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1358.578592] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788d612a-3a3e-4399-91ba-7aaf9bad6c41 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.598767] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d7b3d5-2d9e-4029-a82e-4c223f0a9302 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.632923] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1358.633274] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2765e56f-904b-41aa-afbd-24376c2f91e8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.640700] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Waiting for the task: (returnval){ [ 1358.640700] env[62508]: value = "task-1775477" [ 1358.640700] env[62508]: _type = "Task" [ 1358.640700] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.649159] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775477, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.721214] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Releasing lock "refresh_cache-e652e59f-9432-41cf-b4a5-0f5cf649b24e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1358.721594] env[62508]: DEBUG nova.compute.manager [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Instance network_info: |[{"id": "c1117e3d-5c3e-4513-832b-12ce45699cab", "address": "fa:16:3e:bf:62:e4", "network": {"id": "93471a4c-8043-41bc-9a6c-49116ad50d1a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2104534686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d188021b90b4a7cb04521e090d0c1c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1117e3d-5c", "ovs_interfaceid": "c1117e3d-5c3e-4513-832b-12ce45699cab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1358.722123] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:62:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '99be9a5e-b3f9-4e6c-83d5-df11f817847d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c1117e3d-5c3e-4513-832b-12ce45699cab', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1358.741100] env[62508]: DEBUG oslo.service.loopingcall [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1358.741431] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1358.742489] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a5ec5b44-7cf6-4e9e-b2a9-a944fbb04f6f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.763639] env[62508]: DEBUG nova.compute.manager [req-962031b2-daf0-40bb-b104-1b38226517eb req-9fd78ca6-d292-4075-b186-f0c4846dc740 service nova] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Received event network-changed-c1117e3d-5c3e-4513-832b-12ce45699cab {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1358.763829] env[62508]: DEBUG nova.compute.manager [req-962031b2-daf0-40bb-b104-1b38226517eb req-9fd78ca6-d292-4075-b186-f0c4846dc740 service nova] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Refreshing instance network info cache due to event network-changed-c1117e3d-5c3e-4513-832b-12ce45699cab. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1358.764057] env[62508]: DEBUG oslo_concurrency.lockutils [req-962031b2-daf0-40bb-b104-1b38226517eb req-9fd78ca6-d292-4075-b186-f0c4846dc740 service nova] Acquiring lock "refresh_cache-e652e59f-9432-41cf-b4a5-0f5cf649b24e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1358.764204] env[62508]: DEBUG oslo_concurrency.lockutils [req-962031b2-daf0-40bb-b104-1b38226517eb req-9fd78ca6-d292-4075-b186-f0c4846dc740 service nova] Acquired lock "refresh_cache-e652e59f-9432-41cf-b4a5-0f5cf649b24e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1358.764363] env[62508]: DEBUG nova.network.neutron [req-962031b2-daf0-40bb-b104-1b38226517eb req-9fd78ca6-d292-4075-b186-f0c4846dc740 service nova] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Refreshing network info cache for port c1117e3d-5c3e-4513-832b-12ce45699cab {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1358.770928] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1358.770928] env[62508]: value = "task-1775478" [ 1358.770928] env[62508]: _type = "Task" [ 1358.770928] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.780222] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775478, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.851806] env[62508]: DEBUG nova.compute.utils [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1358.856646] env[62508]: DEBUG nova.compute.manager [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1358.856837] env[62508]: DEBUG nova.network.neutron [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1358.949789] env[62508]: DEBUG nova.policy [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4f2e7f5f0ed14887a563bcd3ee3a323d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a1af268219a1496fb491ea6353ff551d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1359.152639] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] VM already powered off {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1359.152948] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1359.153222] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1359.153371] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.153550] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1359.157326] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-518e6e74-393d-4daf-9a72-0091fa8951b5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.166962] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1359.167222] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1359.167986] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16e275f7-65ce-42d8-b070-fa7fb6723961 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.174115] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Waiting for the task: (returnval){ [ 1359.174115] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520238a7-b415-e2d3-5ad4-8d73ec04efa7" [ 1359.174115] env[62508]: _type = "Task" [ 1359.174115] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.184836] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520238a7-b415-e2d3-5ad4-8d73ec04efa7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.284785] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775478, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.329360] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e2ffec-7481-47e5-833b-2c0e0d3cd44e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.341088] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8902dc-0c38-44b1-a7e4-152b88571925 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.372228] env[62508]: DEBUG nova.compute.manager [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1359.375764] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d88b35-f5e6-4cc1-a592-2259929889b5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.384410] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d78bf3ae-6739-44ca-b262-bb682f8a1356 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.398510] env[62508]: DEBUG nova.compute.provider_tree [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1359.684895] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520238a7-b415-e2d3-5ad4-8d73ec04efa7, 'name': SearchDatastore_Task, 'duration_secs': 0.011917} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.685768] env[62508]: DEBUG nova.network.neutron [req-962031b2-daf0-40bb-b104-1b38226517eb req-9fd78ca6-d292-4075-b186-f0c4846dc740 service nova] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Updated VIF entry in instance network info cache for port c1117e3d-5c3e-4513-832b-12ce45699cab. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1359.686136] env[62508]: DEBUG nova.network.neutron [req-962031b2-daf0-40bb-b104-1b38226517eb req-9fd78ca6-d292-4075-b186-f0c4846dc740 service nova] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Updating instance_info_cache with network_info: [{"id": "c1117e3d-5c3e-4513-832b-12ce45699cab", "address": "fa:16:3e:bf:62:e4", "network": {"id": "93471a4c-8043-41bc-9a6c-49116ad50d1a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2104534686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d188021b90b4a7cb04521e090d0c1c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1117e3d-5c", "ovs_interfaceid": "c1117e3d-5c3e-4513-832b-12ce45699cab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1359.691037] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79ee95d2-aa34-4992-b529-c186d767adbe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.696650] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Waiting for the task: (returnval){ [ 1359.696650] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529c2b9a-fa1d-9c99-daea-09ad35a0d40c" [ 1359.696650] env[62508]: _type = "Task" [ 1359.696650] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.703054] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529c2b9a-fa1d-9c99-daea-09ad35a0d40c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.725670] env[62508]: DEBUG nova.network.neutron [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Successfully created port: 129ec0b1-e45e-4868-b60c-d9b307a0d56c {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1359.781601] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775478, 'name': CreateVM_Task, 'duration_secs': 0.52256} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.781792] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1359.782560] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1359.783045] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.783385] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1359.783650] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eff49cbe-3508-4122-bade-e50d73a51a59 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.788782] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for the task: (returnval){ [ 1359.788782] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c3ec29-bffb-d3f4-0851-900a504e8936" [ 1359.788782] env[62508]: _type = "Task" [ 1359.788782] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.799703] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c3ec29-bffb-d3f4-0851-900a504e8936, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.902834] env[62508]: DEBUG nova.scheduler.client.report [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1360.121605] env[62508]: DEBUG oslo_concurrency.lockutils [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "868cf942-f348-488d-b00a-af4c8b5efda5" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1360.121605] env[62508]: DEBUG oslo_concurrency.lockutils [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "868cf942-f348-488d-b00a-af4c8b5efda5" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1360.121605] env[62508]: DEBUG nova.compute.manager [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Going to confirm migration 1 {{(pid=62508) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1360.191356] env[62508]: DEBUG oslo_concurrency.lockutils [req-962031b2-daf0-40bb-b104-1b38226517eb req-9fd78ca6-d292-4075-b186-f0c4846dc740 service nova] Releasing lock "refresh_cache-e652e59f-9432-41cf-b4a5-0f5cf649b24e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1360.208025] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529c2b9a-fa1d-9c99-daea-09ad35a0d40c, 'name': SearchDatastore_Task, 'duration_secs': 0.040148} completed successfully.
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.208025] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1360.208025] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 42eb98a9-e341-4a17-9d76-2a2c37efc1a1/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk. {{(pid=62508) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1360.208025] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f5b8467-bbce-4ea3-a81c-47de4545c31c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.216649] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Waiting for the task: (returnval){ [ 1360.216649] env[62508]: value = "task-1775479" [ 1360.216649] env[62508]: _type = "Task" [ 1360.216649] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.225729] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775479, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.302073] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c3ec29-bffb-d3f4-0851-900a504e8936, 'name': SearchDatastore_Task, 'duration_secs': 0.025472} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.302426] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1360.302790] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1360.303063] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1360.303118] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1360.303308] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1360.304661] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9dabca4b-8c85-4da7-afb1-ee2ca1fba99d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.312482] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1360.312681] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1360.313452] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a5e96ce-97ca-43d7-b678-0dbfab4430a9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.320028] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for the task: (returnval){ [ 1360.320028] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52eebb44-cd39-96a1-f8d4-11c513844b2c" [ 1360.320028] env[62508]: _type = "Task" [ 1360.320028] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.329425] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52eebb44-cd39-96a1-f8d4-11c513844b2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.387266] env[62508]: DEBUG nova.compute.manager [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1360.414189] env[62508]: DEBUG oslo_concurrency.lockutils [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.064s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1360.417860] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.876s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1360.418653] env[62508]: DEBUG nova.objects.instance [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Lazy-loading 'resources' on Instance uuid 46a524e2-93b0-4726-812f-98e08b6ba0b4 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1360.433051] env[62508]: DEBUG nova.virt.hardware [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1360.433291] env[62508]: DEBUG nova.virt.hardware [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1360.437018] env[62508]: DEBUG nova.virt.hardware [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1360.437018] env[62508]: DEBUG nova.virt.hardware [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1360.437018] env[62508]: DEBUG nova.virt.hardware [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1360.437018] env[62508]: DEBUG nova.virt.hardware [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1360.437018] env[62508]: DEBUG nova.virt.hardware [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1360.437018] env[62508]: DEBUG nova.virt.hardware [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1360.437253] env[62508]: DEBUG nova.virt.hardware [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1360.437253] env[62508]: DEBUG nova.virt.hardware [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1360.437253] env[62508]: DEBUG nova.virt.hardware [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 
tempest-ServersTestJSON-507876772-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1360.437253] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c31ad6c-e28b-4525-9728-e7e55bc62c0b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.441464] env[62508]: INFO nova.scheduler.client.report [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Deleted allocations for instance fa00f4fe-3bb2-4e17-be22-8a1fda502f65 [ 1360.451114] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700e8f78-c563-4eb9-8cec-14d98f31dc8d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.728277] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775479, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49127} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.729431] env[62508]: DEBUG oslo_concurrency.lockutils [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "refresh_cache-868cf942-f348-488d-b00a-af4c8b5efda5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1360.729522] env[62508]: DEBUG oslo_concurrency.lockutils [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquired lock "refresh_cache-868cf942-f348-488d-b00a-af4c8b5efda5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1360.729737] env[62508]: DEBUG nova.network.neutron [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1360.729851] env[62508]: DEBUG nova.objects.instance [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lazy-loading 'info_cache' on Instance uuid 868cf942-f348-488d-b00a-af4c8b5efda5 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1360.732164] env[62508]: INFO nova.virt.vmwareapi.ds_util [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 42eb98a9-e341-4a17-9d76-2a2c37efc1a1/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk. 
[ 1360.732264] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af53d014-749d-4a16-9524-6e0e28f3003f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.770711] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 42eb98a9-e341-4a17-9d76-2a2c37efc1a1/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1360.771788] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b79b413-2132-4804-bef9-25c7547d266b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.793144] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Waiting for the task: (returnval){ [ 1360.793144] env[62508]: value = "task-1775480" [ 1360.793144] env[62508]: _type = "Task" [ 1360.793144] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.800251] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775480, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.830871] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52eebb44-cd39-96a1-f8d4-11c513844b2c, 'name': SearchDatastore_Task, 'duration_secs': 0.00867} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.831719] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d8f4321-d515-4bf2-938e-32f89c023fd2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.837054] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for the task: (returnval){ [ 1360.837054] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527df7ad-f612-0a29-528c-626d9dd686be" [ 1360.837054] env[62508]: _type = "Task" [ 1360.837054] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.845302] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527df7ad-f612-0a29-528c-626d9dd686be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.962634] env[62508]: DEBUG oslo_concurrency.lockutils [None req-74f7a378-3c71-4f9b-a421-d666ede69f64 tempest-ServerDiagnosticsV248Test-553628157 tempest-ServerDiagnosticsV248Test-553628157-project-member] Lock "fa00f4fe-3bb2-4e17-be22-8a1fda502f65" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 43.625s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.300207] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775480, 'name': ReconfigVM_Task, 'duration_secs': 0.287619} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.300703] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 42eb98a9-e341-4a17-9d76-2a2c37efc1a1/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1361.302040] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d113a1-84ac-4aef-8a15-d5274203f4c1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.330689] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ff66b3a-8cb4-4760-8fe6-3945572d6439 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.358700] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527df7ad-f612-0a29-528c-626d9dd686be, 'name': SearchDatastore_Task, 'duration_secs': 0.00905} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.359035] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Waiting for the task: (returnval){ [ 1361.359035] env[62508]: value = "task-1775481" [ 1361.359035] env[62508]: _type = "Task" [ 1361.359035] env[62508]: } to complete.
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.359198] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1361.359451] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] e652e59f-9432-41cf-b4a5-0f5cf649b24e/e652e59f-9432-41cf-b4a5-0f5cf649b24e.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1361.359761] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-54994b9e-e2a0-440f-917e-15a1589c0650 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.377030] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775481, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.377030] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for the task: (returnval){ [ 1361.377030] env[62508]: value = "task-1775482" [ 1361.377030] env[62508]: _type = "Task" [ 1361.377030] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.383869] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775482, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.442513] env[62508]: DEBUG nova.compute.manager [req-4f02c443-ba51-4be3-96af-8827343565c3 req-b49df572-ca1e-45b3-a8bc-a87f301c64c5 service nova] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Received event network-vif-plugged-129ec0b1-e45e-4868-b60c-d9b307a0d56c {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1361.442774] env[62508]: DEBUG oslo_concurrency.lockutils [req-4f02c443-ba51-4be3-96af-8827343565c3 req-b49df572-ca1e-45b3-a8bc-a87f301c64c5 service nova] Acquiring lock "73452964-d690-451d-98c3-fba3c3301c6d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.443029] env[62508]: DEBUG oslo_concurrency.lockutils [req-4f02c443-ba51-4be3-96af-8827343565c3 req-b49df572-ca1e-45b3-a8bc-a87f301c64c5 service nova] Lock "73452964-d690-451d-98c3-fba3c3301c6d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.443203] env[62508]: DEBUG oslo_concurrency.lockutils [req-4f02c443-ba51-4be3-96af-8827343565c3 req-b49df572-ca1e-45b3-a8bc-a87f301c64c5 service nova] Lock "73452964-d690-451d-98c3-fba3c3301c6d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.443370] env[62508]: DEBUG nova.compute.manager [req-4f02c443-ba51-4be3-96af-8827343565c3 req-b49df572-ca1e-45b3-a8bc-a87f301c64c5 service nova] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] No waiting events found dispatching network-vif-plugged-129ec0b1-e45e-4868-b60c-d9b307a0d56c {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1361.443533] env[62508]: WARNING nova.compute.manager [req-4f02c443-ba51-4be3-96af-8827343565c3 req-b49df572-ca1e-45b3-a8bc-a87f301c64c5 service nova] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Received unexpected event network-vif-plugged-129ec0b1-e45e-4868-b60c-d9b307a0d56c for instance with vm_state building and task_state spawning. 
[ 1361.494698] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54738da5-057e-4706-9103-19b70fc3027c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.502823] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf63b7e-c13e-470f-b808-ba6676e155eb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.536087] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef836108-50ff-4fee-96ad-13b36b0bd223 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.544577] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3846a4ae-0216-49e5-8729-deaee1629c55 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.561654] env[62508]: DEBUG nova.compute.provider_tree [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1361.661780] env[62508]: DEBUG nova.network.neutron [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Successfully updated port: 129ec0b1-e45e-4868-b60c-d9b307a0d56c {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1361.873155] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775481, 'name': ReconfigVM_Task, 'duration_secs': 0.148242} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.873442] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1361.873701] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58481246-e213-4ea5-a756-ebe3c661e137 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.881102] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Waiting for the task: (returnval){ [ 1361.881102] env[62508]: value = "task-1775483" [ 1361.881102] env[62508]: _type = "Task" [ 1361.881102] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.890191] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775482, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.895733] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775483, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.066620] env[62508]: DEBUG nova.scheduler.client.report [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1362.156411] env[62508]: DEBUG nova.network.neutron [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Updating instance_info_cache with network_info: [{"id": "f7f2eb4c-dab6-44e0-8f5e-7013ddd13683", "address": "fa:16:3e:1f:3b:14", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7f2eb4c-da", "ovs_interfaceid": "f7f2eb4c-dab6-44e0-8f5e-7013ddd13683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1362.165809] env[62508]: DEBUG oslo_concurrency.lockutils [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Acquiring lock "refresh_cache-73452964-d690-451d-98c3-fba3c3301c6d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1362.165809] env[62508]: DEBUG 
oslo_concurrency.lockutils [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Acquired lock "refresh_cache-73452964-d690-451d-98c3-fba3c3301c6d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1362.165809] env[62508]: DEBUG nova.network.neutron [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1362.389381] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775482, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.394533] env[62508]: DEBUG oslo_vmware.api [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775483, 'name': PowerOnVM_Task, 'duration_secs': 0.423844} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.395897] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1362.398637] env[62508]: DEBUG nova.compute.manager [None req-7ecacfa9-3deb-4148-94a5-0d27c7a4e5af tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1362.399977] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e76d50-4396-4bbd-b3a4-c648bc5cb0ca {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.577954] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.160s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1362.580464] env[62508]: DEBUG oslo_concurrency.lockutils [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.287s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1362.582312] env[62508]: INFO nova.compute.claims [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 
a226327d-11df-45e0-bef8-2337a0317c9e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1362.605607] env[62508]: INFO nova.scheduler.client.report [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Deleted allocations for instance 46a524e2-93b0-4726-812f-98e08b6ba0b4 [ 1362.659328] env[62508]: DEBUG oslo_concurrency.lockutils [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Releasing lock "refresh_cache-868cf942-f348-488d-b00a-af4c8b5efda5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1362.659613] env[62508]: DEBUG nova.objects.instance [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lazy-loading 'migration_context' on Instance uuid 868cf942-f348-488d-b00a-af4c8b5efda5 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1362.722362] env[62508]: DEBUG nova.network.neutron [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1362.889766] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775482, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.054201} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.890639] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] e652e59f-9432-41cf-b4a5-0f5cf649b24e/e652e59f-9432-41cf-b4a5-0f5cf649b24e.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1362.891426] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1362.892025] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aecabefb-533b-4639-9bc8-6ba5011f0e5d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.902137] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for the task: (returnval){ [ 1362.902137] env[62508]: value = "task-1775484" [ 1362.902137] env[62508]: _type = "Task" [ 1362.902137] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.910071] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775484, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.001883] env[62508]: DEBUG nova.network.neutron [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Updating instance_info_cache with network_info: [{"id": "129ec0b1-e45e-4868-b60c-d9b307a0d56c", "address": "fa:16:3e:58:64:15", "network": {"id": "ac7c30e4-6546-4d38-864a-242d0169a576", "bridge": "br-int", "label": "tempest-ServersTestJSON-108451886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1af268219a1496fb491ea6353ff551d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4e52d8a-b086-4333-a5a1-938680a2d2bd", "external-id": "nsx-vlan-transportzone-973", "segmentation_id": 973, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap129ec0b1-e4", "ovs_interfaceid": "129ec0b1-e45e-4868-b60c-d9b307a0d56c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1363.116362] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a31b628-1415-4e6e-b86e-edf5e6659a4c tempest-ImagesNegativeTestJSON-1083322589 tempest-ImagesNegativeTestJSON-1083322589-project-member] Lock "46a524e2-93b0-4726-812f-98e08b6ba0b4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.106s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.165530] env[62508]: DEBUG nova.objects.base [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Object Instance<868cf942-f348-488d-b00a-af4c8b5efda5> lazy-loaded attributes: info_cache,migration_context {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1363.166647] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe817cb-252d-4e82-bc57-786bc8df552e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.189711] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da4354f4-11f8-4784-afe0-b1bb3e707e62 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.195406] env[62508]: DEBUG oslo_vmware.api [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 
tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1363.195406] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5218a9d5-26b3-1a09-ad1e-2618cda4c46b" [ 1363.195406] env[62508]: _type = "Task" [ 1363.195406] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.205177] env[62508]: DEBUG oslo_vmware.api [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5218a9d5-26b3-1a09-ad1e-2618cda4c46b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.411143] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775484, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072075} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.411568] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1363.415904] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128ad574-d2f9-4a70-b5f1-198ae65769aa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.435308] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] e652e59f-9432-41cf-b4a5-0f5cf649b24e/e652e59f-9432-41cf-b4a5-0f5cf649b24e.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1363.435788] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1164b71d-5cce-4b52-abf0-dd948e6c8bc9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.456771] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for the task: (returnval){ [ 1363.456771] env[62508]: value = "task-1775485" [ 1363.456771] env[62508]: _type = "Task" [ 1363.456771] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.467500] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775485, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.505437] env[62508]: DEBUG oslo_concurrency.lockutils [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Releasing lock "refresh_cache-73452964-d690-451d-98c3-fba3c3301c6d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1363.505783] env[62508]: DEBUG nova.compute.manager [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Instance network_info: |[{"id": "129ec0b1-e45e-4868-b60c-d9b307a0d56c", "address": "fa:16:3e:58:64:15", "network": {"id": "ac7c30e4-6546-4d38-864a-242d0169a576", "bridge": "br-int", "label": "tempest-ServersTestJSON-108451886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1af268219a1496fb491ea6353ff551d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4e52d8a-b086-4333-a5a1-938680a2d2bd", "external-id": "nsx-vlan-transportzone-973", "segmentation_id": 973, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap129ec0b1-e4", "ovs_interfaceid": "129ec0b1-e45e-4868-b60c-d9b307a0d56c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1363.506232] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:64:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e4e52d8a-b086-4333-a5a1-938680a2d2bd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '129ec0b1-e45e-4868-b60c-d9b307a0d56c', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1363.514987] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Creating folder: Project (a1af268219a1496fb491ea6353ff551d). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1363.516620] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-41ee7557-0286-4780-861e-2edc385ea8fe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.528029] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Created folder: Project (a1af268219a1496fb491ea6353ff551d) in parent group-v368536. 
[ 1363.528211] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Creating folder: Instances. Parent ref: group-v368596. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1363.528466] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3fda9196-5953-4e38-b6f7-b0ab0456adc0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.537063] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Created folder: Instances in parent group-v368596. [ 1363.537327] env[62508]: DEBUG oslo.service.loopingcall [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1363.540665] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1363.540665] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9cb124bd-1a7c-4935-808b-2515f047925c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.557597] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1363.557597] env[62508]: value = "task-1775488" [ 1363.557597] env[62508]: _type = "Task" [ 1363.557597] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.569139] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775488, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.709482] env[62508]: DEBUG oslo_vmware.api [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5218a9d5-26b3-1a09-ad1e-2618cda4c46b, 'name': SearchDatastore_Task, 'duration_secs': 0.013174} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.709801] env[62508]: DEBUG oslo_concurrency.lockutils [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1363.917946] env[62508]: DEBUG nova.compute.manager [req-72442246-db5e-4d37-bdf1-257a2801f315 req-bcab73d3-5c2e-4142-92b6-0d6346c92234 service nova] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Received event network-changed-129ec0b1-e45e-4868-b60c-d9b307a0d56c {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1363.918206] env[62508]: DEBUG nova.compute.manager [req-72442246-db5e-4d37-bdf1-257a2801f315 req-bcab73d3-5c2e-4142-92b6-0d6346c92234 service nova] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Refreshing instance network info cache due to event network-changed-129ec0b1-e45e-4868-b60c-d9b307a0d56c. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1363.919210] env[62508]: DEBUG oslo_concurrency.lockutils [req-72442246-db5e-4d37-bdf1-257a2801f315 req-bcab73d3-5c2e-4142-92b6-0d6346c92234 service nova] Acquiring lock "refresh_cache-73452964-d690-451d-98c3-fba3c3301c6d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1363.919437] env[62508]: DEBUG oslo_concurrency.lockutils [req-72442246-db5e-4d37-bdf1-257a2801f315 req-bcab73d3-5c2e-4142-92b6-0d6346c92234 service nova] Acquired lock "refresh_cache-73452964-d690-451d-98c3-fba3c3301c6d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1363.919655] env[62508]: DEBUG nova.network.neutron [req-72442246-db5e-4d37-bdf1-257a2801f315 req-bcab73d3-5c2e-4142-92b6-0d6346c92234 service nova] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Refreshing network info cache for port 129ec0b1-e45e-4868-b60c-d9b307a0d56c {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1363.973024] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775485, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.077801] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775488, 'name': CreateVM_Task, 'duration_secs': 0.509374} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.078442] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1364.078980] env[62508]: DEBUG oslo_concurrency.lockutils [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.080137] env[62508]: DEBUG oslo_concurrency.lockutils [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.080332] env[62508]: DEBUG oslo_concurrency.lockutils [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1364.080731] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b07a05fa-98bd-468e-ad00-09d9b2fd0728 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.088245] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Waiting for the task: (returnval){ [ 1364.088245] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5238d160-2ff3-3d24-fac8-d51360a3bf52" [ 1364.088245] env[62508]: _type = "Task" [ 1364.088245] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.095955] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5238d160-2ff3-3d24-fac8-d51360a3bf52, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.107647] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5646a12d-12e5-4c33-8df8-7a5812a90015 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.119448] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b51eb5-6b02-4404-be00-42d71231edfc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.158929] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f676214-13a7-4446-a108-657ab4f39359 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.167100] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc22a4d3-951f-4224-ae66-ea286ab3c99a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.184230] env[62508]: DEBUG nova.compute.provider_tree [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1364.446657] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "70c8de27-4696-4005-bbec-e7a33e56311b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.446913] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "70c8de27-4696-4005-bbec-e7a33e56311b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.472739] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775485, 'name': ReconfigVM_Task, 'duration_secs': 0.661795} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.473104] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Reconfigured VM instance instance-00000015 to attach disk [datastore1] e652e59f-9432-41cf-b4a5-0f5cf649b24e/e652e59f-9432-41cf-b4a5-0f5cf649b24e.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1364.473800] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f8432a40-3fc4-4fb8-bbaa-195a00e92996 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.481742] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for the task: (returnval){ [ 1364.481742] env[62508]: value = "task-1775489" [ 1364.481742] env[62508]: _type = "Task" [ 1364.481742] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.494572] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775489, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.599893] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5238d160-2ff3-3d24-fac8-d51360a3bf52, 'name': SearchDatastore_Task, 'duration_secs': 0.025064} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.600347] env[62508]: DEBUG oslo_concurrency.lockutils [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1364.600574] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1364.600912] env[62508]: DEBUG oslo_concurrency.lockutils [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.601096] env[62508]: DEBUG oslo_concurrency.lockutils [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.601318] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1364.602046] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3292b07-d78e-4cfa-8c62-184edd8f10f1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.622078] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1364.622078] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1364.622078] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dab37116-b7c9-4692-a425-7317cb24b09a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.629367] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Waiting for the task: (returnval){ [ 1364.629367] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52eeacbc-1d9a-126b-4a30-04edcdf97994" [ 1364.629367] env[62508]: _type = "Task" [ 1364.629367] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.638434] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52eeacbc-1d9a-126b-4a30-04edcdf97994, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.687548] env[62508]: DEBUG nova.scheduler.client.report [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1364.765038] env[62508]: DEBUG nova.network.neutron [req-72442246-db5e-4d37-bdf1-257a2801f315 req-bcab73d3-5c2e-4142-92b6-0d6346c92234 service nova] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Updated VIF entry in instance network info cache for port 129ec0b1-e45e-4868-b60c-d9b307a0d56c. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1364.765417] env[62508]: DEBUG nova.network.neutron [req-72442246-db5e-4d37-bdf1-257a2801f315 req-bcab73d3-5c2e-4142-92b6-0d6346c92234 service nova] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Updating instance_info_cache with network_info: [{"id": "129ec0b1-e45e-4868-b60c-d9b307a0d56c", "address": "fa:16:3e:58:64:15", "network": {"id": "ac7c30e4-6546-4d38-864a-242d0169a576", "bridge": "br-int", "label": "tempest-ServersTestJSON-108451886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1af268219a1496fb491ea6353ff551d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4e52d8a-b086-4333-a5a1-938680a2d2bd", "external-id": "nsx-vlan-transportzone-973", "segmentation_id": 973, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap129ec0b1-e4", "ovs_interfaceid": "129ec0b1-e45e-4868-b60c-d9b307a0d56c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.992415] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775489, 'name': Rename_Task, 'duration_secs': 0.139718} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.992695] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1364.992944] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ed0097a-5676-4b03-9613-5693df29bdf5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.999266] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for the task: (returnval){ [ 1364.999266] env[62508]: value = "task-1775490" [ 1364.999266] env[62508]: _type = "Task" [ 1364.999266] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.006795] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775490, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.142314] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52eeacbc-1d9a-126b-4a30-04edcdf97994, 'name': SearchDatastore_Task, 'duration_secs': 0.009077} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.143322] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6cfc2cc-e4ca-4b6b-88de-6060cd3aeff8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.149013] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Waiting for the task: (returnval){ [ 1365.149013] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5240589a-1f3f-f609-5f40-4779cc3971dc" [ 1365.149013] env[62508]: _type = "Task" [ 1365.149013] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.163726] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5240589a-1f3f-f609-5f40-4779cc3971dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.195379] env[62508]: DEBUG oslo_concurrency.lockutils [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.615s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1365.196030] env[62508]: DEBUG nova.compute.manager [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1365.199047] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.616s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1365.200047] env[62508]: INFO nova.compute.claims [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1365.234039] env[62508]: DEBUG nova.compute.manager [req-9abbd73a-33f1-489d-b286-07e238aef143 req-7fa6b153-b575-4a18-9bdf-6f31849944cc service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Received event network-changed-119423da-6eda-483b-b0aa-050fdbcd42e6 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1365.235689] env[62508]: DEBUG nova.compute.manager [req-9abbd73a-33f1-489d-b286-07e238aef143 req-7fa6b153-b575-4a18-9bdf-6f31849944cc service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Refreshing instance network info cache due to event network-changed-119423da-6eda-483b-b0aa-050fdbcd42e6. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1365.235689] env[62508]: DEBUG oslo_concurrency.lockutils [req-9abbd73a-33f1-489d-b286-07e238aef143 req-7fa6b153-b575-4a18-9bdf-6f31849944cc service nova] Acquiring lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1365.235689] env[62508]: DEBUG oslo_concurrency.lockutils [req-9abbd73a-33f1-489d-b286-07e238aef143 req-7fa6b153-b575-4a18-9bdf-6f31849944cc service nova] Acquired lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1365.235895] env[62508]: DEBUG nova.network.neutron [req-9abbd73a-33f1-489d-b286-07e238aef143 req-7fa6b153-b575-4a18-9bdf-6f31849944cc service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Refreshing network info cache for port 119423da-6eda-483b-b0aa-050fdbcd42e6 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1365.267663] env[62508]: DEBUG oslo_concurrency.lockutils [req-72442246-db5e-4d37-bdf1-257a2801f315 req-bcab73d3-5c2e-4142-92b6-0d6346c92234 service nova] Releasing lock "refresh_cache-73452964-d690-451d-98c3-fba3c3301c6d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.512275] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775490, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.659696] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5240589a-1f3f-f609-5f40-4779cc3971dc, 'name': SearchDatastore_Task, 'duration_secs': 0.0106} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.660071] env[62508]: DEBUG oslo_concurrency.lockutils [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.660410] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 73452964-d690-451d-98c3-fba3c3301c6d/73452964-d690-451d-98c3-fba3c3301c6d.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1365.661042] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a1680c4-6221-4e7c-967d-b74147841533 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.670237] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Waiting for the task: (returnval){ [ 1365.670237] env[62508]: value = "task-1775491" [ 1365.670237] env[62508]: _type = "Task" [ 1365.670237] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.679626] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': task-1775491, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.704678] env[62508]: DEBUG nova.compute.utils [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1365.708694] env[62508]: DEBUG nova.compute.manager [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1365.708974] env[62508]: DEBUG nova.network.neutron [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1365.774302] env[62508]: DEBUG nova.policy [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a79a5afdc98410d820bebf40653a12d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd58abb4cdcb74100b7c81076c7642b6f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1366.011753] env[62508]: DEBUG oslo_vmware.api [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775490, 'name': PowerOnVM_Task, 'duration_secs': 0.68895} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.011991] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1366.012266] env[62508]: INFO nova.compute.manager [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Took 10.37 seconds to spawn the instance on the hypervisor. [ 1366.012562] env[62508]: DEBUG nova.compute.manager [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1366.013772] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc11bc6-a6a0-42ad-ba02-47bfd28a4e66 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.181540] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': task-1775491, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.217821] env[62508]: DEBUG nova.compute.manager [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1366.359510] env[62508]: DEBUG nova.network.neutron [req-9abbd73a-33f1-489d-b286-07e238aef143 req-7fa6b153-b575-4a18-9bdf-6f31849944cc service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Updated VIF entry in instance network info cache for port 119423da-6eda-483b-b0aa-050fdbcd42e6. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1366.365065] env[62508]: DEBUG nova.network.neutron [req-9abbd73a-33f1-489d-b286-07e238aef143 req-7fa6b153-b575-4a18-9bdf-6f31849944cc service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Updating instance_info_cache with network_info: [{"id": "119423da-6eda-483b-b0aa-050fdbcd42e6", "address": "fa:16:3e:54:21:2d", "network": {"id": "9dc77876-f51a-4ae3-b5af-bbff5188cb44", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1439036833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "eb642c0539ac42a595c2c9817f39a178", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119423da-6e", "ovs_interfaceid": "119423da-6eda-483b-b0aa-050fdbcd42e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1366.446125] env[62508]: DEBUG nova.network.neutron [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Successfully created port: bcf067d3-85ff-44e6-bb5d-153c8b425360 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1366.462974] env[62508]: DEBUG nova.compute.manager [req-3fcce252-a4b2-43d9-8dcb-1a7e3912fe4f req-1a88400f-e53f-4896-aac2-05b9f12a9daf service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Received event network-changed-119423da-6eda-483b-b0aa-050fdbcd42e6 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1366.463173] env[62508]: DEBUG nova.compute.manager [req-3fcce252-a4b2-43d9-8dcb-1a7e3912fe4f req-1a88400f-e53f-4896-aac2-05b9f12a9daf service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Refreshing instance network info cache due to event network-changed-119423da-6eda-483b-b0aa-050fdbcd42e6. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1366.463758] env[62508]: DEBUG oslo_concurrency.lockutils [req-3fcce252-a4b2-43d9-8dcb-1a7e3912fe4f req-1a88400f-e53f-4896-aac2-05b9f12a9daf service nova] Acquiring lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1366.535389] env[62508]: INFO nova.compute.manager [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Took 48.65 seconds to build instance. [ 1366.685132] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': task-1775491, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.8364} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.685132] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 73452964-d690-451d-98c3-fba3c3301c6d/73452964-d690-451d-98c3-fba3c3301c6d.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1366.685132] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1366.686323] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c33499bf-0c37-406c-89b0-a158852c1220 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.696350] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Waiting for the task: (returnval){ [ 1366.696350] env[62508]: value = "task-1775492" [ 1366.696350] env[62508]: _type = "Task" [ 1366.696350] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.717244] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': task-1775492, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.743532] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf9fc14-581e-47d0-8524-f94487e70379 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.753701] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705f0c7f-3ac5-44be-bdb6-85a5870bd9cd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.787841] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f4000f-382a-4974-bec1-3fff20109632 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.797132] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00ba326-a7b4-4a1d-93f3-f147efe54016 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.813066] env[62508]: DEBUG nova.compute.provider_tree [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1366.867188] env[62508]: DEBUG oslo_concurrency.lockutils [req-9abbd73a-33f1-489d-b286-07e238aef143 req-7fa6b153-b575-4a18-9bdf-6f31849944cc service nova] Releasing lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1366.867312] env[62508]: DEBUG oslo_concurrency.lockutils [req-3fcce252-a4b2-43d9-8dcb-1a7e3912fe4f req-1a88400f-e53f-4896-aac2-05b9f12a9daf service nova] Acquired lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1366.867504] env[62508]: DEBUG nova.network.neutron [req-3fcce252-a4b2-43d9-8dcb-1a7e3912fe4f req-1a88400f-e53f-4896-aac2-05b9f12a9daf service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Refreshing network info cache for port 119423da-6eda-483b-b0aa-050fdbcd42e6 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1367.037139] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6239ec75-b3cc-4ff2-a290-55e8cc4fdd20 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "e652e59f-9432-41cf-b4a5-0f5cf649b24e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.871s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1367.214965] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': task-1775492, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.150602} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.215279] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1367.216052] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f5a5fb-d558-4461-84ce-a68af7b4c5e6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.230745] env[62508]: DEBUG nova.compute.manager [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1367.241215] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] 73452964-d690-451d-98c3-fba3c3301c6d/73452964-d690-451d-98c3-fba3c3301c6d.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1367.243087] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76c43ff4-4d9c-4209-83ae-bdc694d474b0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.263684] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Waiting for the task: (returnval){ [ 1367.263684] env[62508]: value = "task-1775493" [ 1367.263684] env[62508]: _type = "Task" [ 1367.263684] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.265824] env[62508]: DEBUG nova.virt.hardware [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1367.266081] env[62508]: DEBUG nova.virt.hardware [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1367.266242] env[62508]: DEBUG nova.virt.hardware [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1367.266425] env[62508]: DEBUG nova.virt.hardware [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1367.266572] env[62508]: DEBUG nova.virt.hardware [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1367.266719] env[62508]: DEBUG nova.virt.hardware [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1367.266924] env[62508]: DEBUG nova.virt.hardware [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1367.267110] env[62508]: DEBUG nova.virt.hardware [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1367.267404] env[62508]: DEBUG nova.virt.hardware [None 
req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1367.267466] env[62508]: DEBUG nova.virt.hardware [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1367.267629] env[62508]: DEBUG nova.virt.hardware [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1367.268893] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495eb72b-639e-4cc4-894b-7190eeec2fed {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.282932] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa278788-d803-4488-8577-25d644718b22 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.288434] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': task-1775493, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.316844] env[62508]: DEBUG nova.scheduler.client.report [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1367.541525] env[62508]: DEBUG nova.compute.manager [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1367.782825] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': task-1775493, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.806516] env[62508]: DEBUG nova.network.neutron [req-3fcce252-a4b2-43d9-8dcb-1a7e3912fe4f req-1a88400f-e53f-4896-aac2-05b9f12a9daf service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Updated VIF entry in instance network info cache for port 119423da-6eda-483b-b0aa-050fdbcd42e6. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1367.806964] env[62508]: DEBUG nova.network.neutron [req-3fcce252-a4b2-43d9-8dcb-1a7e3912fe4f req-1a88400f-e53f-4896-aac2-05b9f12a9daf service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Updating instance_info_cache with network_info: [{"id": "119423da-6eda-483b-b0aa-050fdbcd42e6", "address": "fa:16:3e:54:21:2d", "network": {"id": "9dc77876-f51a-4ae3-b5af-bbff5188cb44", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1439036833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "eb642c0539ac42a595c2c9817f39a178", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119423da-6e", "ovs_interfaceid": "119423da-6eda-483b-b0aa-050fdbcd42e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1367.824215] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1367.824808] env[62508]: DEBUG nova.compute.manager [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1367.828826] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.456s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1367.833353] env[62508]: INFO nova.compute.claims [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1368.077337] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1368.281173] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': task-1775493, 'name': ReconfigVM_Task, 'duration_secs': 0.821632} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.281484] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Reconfigured VM instance instance-00000016 to attach disk [datastore1] 73452964-d690-451d-98c3-fba3c3301c6d/73452964-d690-451d-98c3-fba3c3301c6d.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1368.282112] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8f24823-e937-4007-847e-1487338e04a5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.288916] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Waiting for the task: (returnval){ [ 1368.288916] env[62508]: value = "task-1775494" [ 1368.288916] env[62508]: _type = "Task" [ 1368.288916] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.297925] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': task-1775494, 'name': Rename_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.310768] env[62508]: DEBUG oslo_concurrency.lockutils [req-3fcce252-a4b2-43d9-8dcb-1a7e3912fe4f req-1a88400f-e53f-4896-aac2-05b9f12a9daf service nova] Releasing lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1368.330699] env[62508]: DEBUG nova.compute.utils [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1368.332169] env[62508]: DEBUG nova.compute.manager [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1368.332348] env[62508]: DEBUG nova.network.neutron [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1368.401280] env[62508]: DEBUG nova.policy [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1dd7d1d4d1e44b36b35692e763f3374b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3b47a7c40d404f5780750a1d97276600', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1368.669773] env[62508]: DEBUG nova.network.neutron [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Successfully updated port: bcf067d3-85ff-44e6-bb5d-153c8b425360 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1368.800927] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': task-1775494, 'name': Rename_Task, 'duration_secs': 0.159923} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.801432] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1368.801819] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-03a8ebb2-6e9b-4e6b-988c-30fffcc0e27d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.808444] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Waiting for the task: (returnval){ [ 1368.808444] env[62508]: value = "task-1775495" [ 1368.808444] env[62508]: _type = "Task" [ 1368.808444] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.817932] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': task-1775495, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.836214] env[62508]: DEBUG nova.compute.manager [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1369.173027] env[62508]: DEBUG oslo_concurrency.lockutils [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "refresh_cache-a226327d-11df-45e0-bef8-2337a0317c9e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1369.173027] env[62508]: DEBUG oslo_concurrency.lockutils [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquired lock "refresh_cache-a226327d-11df-45e0-bef8-2337a0317c9e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.173027] env[62508]: DEBUG nova.network.neutron [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1369.271018] env[62508]: DEBUG nova.network.neutron [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Successfully created port: d19e2f57-30f3-41cd-b87b-d8378a2a8cc0 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1369.320856] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': task-1775495, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.407874] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c48efb-0e60-49a4-9c01-33bac24195ee {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.418722] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa5669c-139c-46d3-940f-2f3882a21a30 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.456295] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6272ce25-e1e1-4e39-891e-2b58bd308a6a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.465271] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b8a81a-6ff6-493c-9b41-2d3e1d76fa70 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.486211] env[62508]: DEBUG nova.compute.provider_tree [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1369.721677] env[62508]: DEBUG nova.network.neutron [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1369.819266] env[62508]: DEBUG oslo_vmware.api [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': task-1775495, 'name': PowerOnVM_Task, 'duration_secs': 0.802766} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.819547] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1369.819752] env[62508]: INFO nova.compute.manager [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Took 9.43 seconds to spawn the instance on the hypervisor. 
[ 1369.819932] env[62508]: DEBUG nova.compute.manager [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1369.820759] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-094373be-43d2-48dc-a149-8177c6b65ec2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.854061] env[62508]: DEBUG nova.compute.manager [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1369.892423] env[62508]: DEBUG nova.virt.hardware [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1369.892423] env[62508]: DEBUG nova.virt.hardware [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1369.892423] env[62508]: DEBUG nova.virt.hardware [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1369.892882] env[62508]: DEBUG nova.virt.hardware [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1369.892882] env[62508]: DEBUG nova.virt.hardware [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1369.892882] env[62508]: DEBUG nova.virt.hardware [None req-73eff98f-32a5-4aa7-872a-362a9a859812 
tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1369.892882] env[62508]: DEBUG nova.virt.hardware [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1369.892882] env[62508]: DEBUG nova.virt.hardware [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1369.893019] env[62508]: DEBUG nova.virt.hardware [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1369.893019] env[62508]: DEBUG nova.virt.hardware [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1369.893215] env[62508]: DEBUG nova.virt.hardware [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1369.894937] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b4d38f-2179-4864-aad0-8283e7314621 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.905040] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7843dfa-2261-4d89-bd8f-0d1290d1a13e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.015872] env[62508]: ERROR nova.scheduler.client.report [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [req-12e688e3-6232-4bcb-ae7d-aca62d486d61] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-12e688e3-6232-4bcb-ae7d-aca62d486d61"}]} [ 1370.037524] env[62508]: DEBUG nova.scheduler.client.report [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1370.063765] env[62508]: DEBUG nova.scheduler.client.report [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1370.063765] env[62508]: DEBUG nova.compute.provider_tree [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1370.082271] env[62508]: DEBUG nova.scheduler.client.report [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1370.116840] env[62508]: DEBUG nova.scheduler.client.report [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1370.175500] env[62508]: DEBUG nova.network.neutron [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Updating instance_info_cache with network_info: [{"id": "bcf067d3-85ff-44e6-bb5d-153c8b425360", "address": "fa:16:3e:b9:ec:80", "network": {"id": "60ac9aa7-8f09-4ba8-b8bd-545df0c62d98", "bridge": 
"br-int", "label": "tempest-ServerRescueTestJSON-289553303-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d58abb4cdcb74100b7c81076c7642b6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcf067d3-85", "ovs_interfaceid": "bcf067d3-85ff-44e6-bb5d-153c8b425360", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.345295] env[62508]: INFO nova.compute.manager [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Took 50.05 seconds to build instance. [ 1370.428174] env[62508]: DEBUG nova.compute.manager [req-00062326-3907-4ce9-8198-5f9d619a897b req-63c1cce8-2224-4abe-b715-3394ece423fb service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Received event network-changed-119423da-6eda-483b-b0aa-050fdbcd42e6 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1370.428174] env[62508]: DEBUG nova.compute.manager [req-00062326-3907-4ce9-8198-5f9d619a897b req-63c1cce8-2224-4abe-b715-3394ece423fb service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Refreshing instance network info cache due to event network-changed-119423da-6eda-483b-b0aa-050fdbcd42e6. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1370.428174] env[62508]: DEBUG oslo_concurrency.lockutils [req-00062326-3907-4ce9-8198-5f9d619a897b req-63c1cce8-2224-4abe-b715-3394ece423fb service nova] Acquiring lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1370.428174] env[62508]: DEBUG oslo_concurrency.lockutils [req-00062326-3907-4ce9-8198-5f9d619a897b req-63c1cce8-2224-4abe-b715-3394ece423fb service nova] Acquired lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1370.428174] env[62508]: DEBUG nova.network.neutron [req-00062326-3907-4ce9-8198-5f9d619a897b req-63c1cce8-2224-4abe-b715-3394ece423fb service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Refreshing network info cache for port 119423da-6eda-483b-b0aa-050fdbcd42e6 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1370.664142] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c04e26-7cff-4f3b-a2f7-679b9ae92082 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.671976] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ca0a25-77e7-4d89-b33d-bcca79a4a01e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.680811] env[62508]: DEBUG oslo_concurrency.lockutils [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Releasing lock "refresh_cache-a226327d-11df-45e0-bef8-2337a0317c9e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1370.681364] env[62508]: DEBUG nova.compute.manager [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Instance network_info: |[{"id": "bcf067d3-85ff-44e6-bb5d-153c8b425360", "address": "fa:16:3e:b9:ec:80", "network": {"id": "60ac9aa7-8f09-4ba8-b8bd-545df0c62d98", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-289553303-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d58abb4cdcb74100b7c81076c7642b6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcf067d3-85", "ovs_interfaceid": "bcf067d3-85ff-44e6-bb5d-153c8b425360", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1370.681776] 
env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:ec:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1b8b991-feba-44e6-900c-6486e7e122f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bcf067d3-85ff-44e6-bb5d-153c8b425360', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1370.691961] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Creating folder: Project (d58abb4cdcb74100b7c81076c7642b6f). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1370.692795] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cec92889-4ff9-45f3-adc3-3ecbaac0d5f0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.722090] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbce5e29-638e-41c2-a162-3cf3ab2515f7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.732246] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5bd0db8-17b8-4ee9-9115-f8f7c735c781 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.737985] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Created folder: Project (d58abb4cdcb74100b7c81076c7642b6f) in parent group-v368536. [ 1370.737985] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Creating folder: Instances. Parent ref: group-v368599. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1370.737985] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f8b90a2-ff1d-40e7-b886-e36f1e4721ca {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.749257] env[62508]: DEBUG nova.compute.provider_tree [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1370.754737] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Created folder: Instances in parent group-v368599. [ 1370.754737] env[62508]: DEBUG oslo.service.loopingcall [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1370.754737] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1370.754737] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4a77b74-01ac-41a7-b3f2-4e9046edc1c3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.775075] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1370.775075] env[62508]: value = "task-1775498" [ 1370.775075] env[62508]: _type = "Task" [ 1370.775075] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.786699] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775498, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.852107] env[62508]: DEBUG oslo_concurrency.lockutils [None req-adacf661-863c-4b21-bac9-0e972b069ab1 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Lock "73452964-d690-451d-98c3-fba3c3301c6d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.376s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1371.189215] env[62508]: DEBUG nova.compute.manager [req-e9e7ea03-0d61-4a95-a6b1-019fe1a6621d req-850c95d6-0ff7-4e83-a52b-4f83f64a0a1d service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Received event network-changed-119423da-6eda-483b-b0aa-050fdbcd42e6 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1371.189479] env[62508]: DEBUG nova.compute.manager [req-e9e7ea03-0d61-4a95-a6b1-019fe1a6621d req-850c95d6-0ff7-4e83-a52b-4f83f64a0a1d service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Refreshing instance network info cache due to event network-changed-119423da-6eda-483b-b0aa-050fdbcd42e6. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1371.189607] env[62508]: DEBUG oslo_concurrency.lockutils [req-e9e7ea03-0d61-4a95-a6b1-019fe1a6621d req-850c95d6-0ff7-4e83-a52b-4f83f64a0a1d service nova] Acquiring lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1371.287195] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775498, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.327512] env[62508]: DEBUG nova.network.neutron [req-00062326-3907-4ce9-8198-5f9d619a897b req-63c1cce8-2224-4abe-b715-3394ece423fb service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Updated VIF entry in instance network info cache for port 119423da-6eda-483b-b0aa-050fdbcd42e6. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1371.327886] env[62508]: DEBUG nova.network.neutron [req-00062326-3907-4ce9-8198-5f9d619a897b req-63c1cce8-2224-4abe-b715-3394ece423fb service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Updating instance_info_cache with network_info: [{"id": "119423da-6eda-483b-b0aa-050fdbcd42e6", "address": "fa:16:3e:54:21:2d", "network": {"id": "9dc77876-f51a-4ae3-b5af-bbff5188cb44", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1439036833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "eb642c0539ac42a595c2c9817f39a178", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119423da-6e", "ovs_interfaceid": "119423da-6eda-483b-b0aa-050fdbcd42e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1371.329772] env[62508]: DEBUG nova.scheduler.client.report [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 49 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1371.330011] env[62508]: DEBUG nova.compute.provider_tree [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 49 to 50 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1371.330237] env[62508]: DEBUG nova.compute.provider_tree [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1371.354911] 
env[62508]: DEBUG nova.compute.manager [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1371.615408] env[62508]: DEBUG nova.network.neutron [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Successfully updated port: d19e2f57-30f3-41cd-b87b-d8378a2a8cc0 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1371.795036] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775498, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.840671] env[62508]: DEBUG oslo_concurrency.lockutils [req-00062326-3907-4ce9-8198-5f9d619a897b req-63c1cce8-2224-4abe-b715-3394ece423fb service nova] Releasing lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1371.840671] env[62508]: DEBUG nova.compute.manager [req-00062326-3907-4ce9-8198-5f9d619a897b req-63c1cce8-2224-4abe-b715-3394ece423fb service nova] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Received event network-vif-plugged-bcf067d3-85ff-44e6-bb5d-153c8b425360 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1371.840671] env[62508]: DEBUG oslo_concurrency.lockutils [req-00062326-3907-4ce9-8198-5f9d619a897b req-63c1cce8-2224-4abe-b715-3394ece423fb service nova] Acquiring lock "a226327d-11df-45e0-bef8-2337a0317c9e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1371.840671] env[62508]: DEBUG oslo_concurrency.lockutils [req-00062326-3907-4ce9-8198-5f9d619a897b req-63c1cce8-2224-4abe-b715-3394ece423fb service nova] Lock "a226327d-11df-45e0-bef8-2337a0317c9e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1371.840671] env[62508]: DEBUG oslo_concurrency.lockutils [req-00062326-3907-4ce9-8198-5f9d619a897b req-63c1cce8-2224-4abe-b715-3394ece423fb service nova] Lock "a226327d-11df-45e0-bef8-2337a0317c9e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1371.840967] env[62508]: DEBUG nova.compute.manager [req-00062326-3907-4ce9-8198-5f9d619a897b req-63c1cce8-2224-4abe-b715-3394ece423fb service nova] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] No waiting events found dispatching network-vif-plugged-bcf067d3-85ff-44e6-bb5d-153c8b425360 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1371.840967] env[62508]: WARNING nova.compute.manager [req-00062326-3907-4ce9-8198-5f9d619a897b req-63c1cce8-2224-4abe-b715-3394ece423fb service nova] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Received unexpected event network-vif-plugged-bcf067d3-85ff-44e6-bb5d-153c8b425360 for instance with 
vm_state building and task_state spawning. [ 1371.841273] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.012s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1371.841661] env[62508]: DEBUG nova.compute.manager [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1371.844505] env[62508]: DEBUG oslo_concurrency.lockutils [req-e9e7ea03-0d61-4a95-a6b1-019fe1a6621d req-850c95d6-0ff7-4e83-a52b-4f83f64a0a1d service nova] Acquired lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.844689] env[62508]: DEBUG nova.network.neutron [req-e9e7ea03-0d61-4a95-a6b1-019fe1a6621d req-850c95d6-0ff7-4e83-a52b-4f83f64a0a1d service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Refreshing network info cache for port 119423da-6eda-483b-b0aa-050fdbcd42e6 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1371.845911] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.375s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1371.849631] env[62508]: INFO nova.compute.claims [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1371.902838] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1372.123118] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Acquiring lock "refresh_cache-b297d642-88a7-4acc-a94d-e1cb7df81982" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1372.123118] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Acquired lock "refresh_cache-b297d642-88a7-4acc-a94d-e1cb7df81982" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.123118] env[62508]: DEBUG nova.network.neutron [None 
req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1372.292189] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775498, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.353569] env[62508]: DEBUG nova.compute.utils [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1372.357581] env[62508]: DEBUG nova.compute.manager [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1372.357754] env[62508]: DEBUG nova.network.neutron [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1372.454021] env[62508]: DEBUG nova.policy [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd3ef6d1d940b4355afea33ce879649df', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '259e35282a6d43778c432bcce94bd21d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1372.645794] env[62508]: DEBUG nova.network.neutron [req-e9e7ea03-0d61-4a95-a6b1-019fe1a6621d req-850c95d6-0ff7-4e83-a52b-4f83f64a0a1d service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Updated VIF entry in instance network info cache for port 119423da-6eda-483b-b0aa-050fdbcd42e6. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1372.646843] env[62508]: DEBUG nova.network.neutron [req-e9e7ea03-0d61-4a95-a6b1-019fe1a6621d req-850c95d6-0ff7-4e83-a52b-4f83f64a0a1d service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Updating instance_info_cache with network_info: [{"id": "119423da-6eda-483b-b0aa-050fdbcd42e6", "address": "fa:16:3e:54:21:2d", "network": {"id": "9dc77876-f51a-4ae3-b5af-bbff5188cb44", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1439036833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "eb642c0539ac42a595c2c9817f39a178", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119423da-6e", "ovs_interfaceid": "119423da-6eda-483b-b0aa-050fdbcd42e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.670105] env[62508]: DEBUG nova.network.neutron [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1372.793772] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775498, 'name': CreateVM_Task, 'duration_secs': 1.648979} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.793908] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1372.794516] env[62508]: DEBUG oslo_concurrency.lockutils [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1372.794683] env[62508]: DEBUG oslo_concurrency.lockutils [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.798968] env[62508]: DEBUG oslo_concurrency.lockutils [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1372.798968] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69d5320c-712d-4570-8e72-66f9c8155e49 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.800776] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1372.800776] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52260f95-be55-358c-314a-555f9727008e" [ 1372.800776] env[62508]: _type = "Task" [ 1372.800776] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.812390] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52260f95-be55-358c-314a-555f9727008e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.863987] env[62508]: DEBUG nova.compute.manager [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1372.941479] env[62508]: DEBUG nova.network.neutron [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Updating instance_info_cache with network_info: [{"id": "d19e2f57-30f3-41cd-b87b-d8378a2a8cc0", "address": "fa:16:3e:51:eb:19", "network": {"id": "730326a2-16a7-44c2-84df-589271e97270", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-262903296-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b47a7c40d404f5780750a1d97276600", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd19e2f57-30", "ovs_interfaceid": "d19e2f57-30f3-41cd-b87b-d8378a2a8cc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1373.026774] env[62508]: DEBUG nova.network.neutron [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Successfully created port: 214565f6-9a69-416b-9a71-6b98dcdf82a2 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1373.153189] env[62508]: DEBUG oslo_concurrency.lockutils [req-e9e7ea03-0d61-4a95-a6b1-019fe1a6621d req-850c95d6-0ff7-4e83-a52b-4f83f64a0a1d service nova] Releasing lock "refresh_cache-42eb98a9-e341-4a17-9d76-2a2c37efc1a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1373.319188] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52260f95-be55-358c-314a-555f9727008e, 'name': SearchDatastore_Task, 'duration_secs': 0.010774} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.323021] env[62508]: DEBUG oslo_concurrency.lockutils [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1373.323021] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1373.323021] env[62508]: DEBUG oslo_concurrency.lockutils [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1373.323021] env[62508]: DEBUG oslo_concurrency.lockutils [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1373.323370] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1373.323476] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe9b4fe4-17e0-4c1e-9a0e-213152dc749a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.339354] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1373.339354] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1373.340088] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0ce2818-7c04-4131-aea5-cc3507937613 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.346304] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1373.346304] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52be1538-d4b2-f28a-1233-9aa8ed503798" [ 1373.346304] env[62508]: _type = "Task" [ 1373.346304] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.357832] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52be1538-d4b2-f28a-1233-9aa8ed503798, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.381105] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb06443-6082-44cb-881e-ab834b002b4b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.389016] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59dec475-5eeb-4d92-a181-d0ab2420b3e5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.420864] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b3e381-fa36-4867-b02e-91f98d8773e0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.428744] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7d782d-3f08-4646-a13a-b7b9e80a9832 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.443786] env[62508]: DEBUG nova.compute.provider_tree [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1373.448390] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Releasing lock "refresh_cache-b297d642-88a7-4acc-a94d-e1cb7df81982" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1373.449028] env[62508]: DEBUG nova.compute.manager [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Instance network_info: |[{"id": "d19e2f57-30f3-41cd-b87b-d8378a2a8cc0", 
"address": "fa:16:3e:51:eb:19", "network": {"id": "730326a2-16a7-44c2-84df-589271e97270", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-262903296-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b47a7c40d404f5780750a1d97276600", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd19e2f57-30", "ovs_interfaceid": "d19e2f57-30f3-41cd-b87b-d8378a2a8cc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1373.449257] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:eb:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae70d41-6ebf-472a-8504-6530eb37ea41', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd19e2f57-30f3-41cd-b87b-d8378a2a8cc0', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1373.458983] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Creating folder: Project (3b47a7c40d404f5780750a1d97276600). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1373.458983] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dfeb499f-3425-4348-8840-4d4da577d790 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.467972] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Created folder: Project (3b47a7c40d404f5780750a1d97276600) in parent group-v368536. [ 1373.468185] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Creating folder: Instances. Parent ref: group-v368602. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1373.468421] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12adc4fc-9b96-436c-a3a6-048ae8622e9f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.477626] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Created folder: Instances in parent group-v368602. [ 1373.477912] env[62508]: DEBUG oslo.service.loopingcall [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1373.478131] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1373.478353] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a56f520-ba5a-4cf3-ad2d-283afffafdfd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.498527] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1373.498527] env[62508]: value = "task-1775501" [ 1373.498527] env[62508]: _type = "Task" [ 1373.498527] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.508548] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775501, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.523104] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Acquiring lock "2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1373.523346] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Lock "2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1373.860092] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52be1538-d4b2-f28a-1233-9aa8ed503798, 'name': SearchDatastore_Task, 'duration_secs': 0.017192} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.860943] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cad584a-f74c-48d4-b150-0097f619f30f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.866422] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1373.866422] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529e6379-4f28-f804-586f-446c08ee5671" [ 1373.866422] env[62508]: _type = "Task" [ 1373.866422] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.876458] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529e6379-4f28-f804-586f-446c08ee5671, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.891599] env[62508]: DEBUG nova.compute.manager [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1373.920018] env[62508]: DEBUG nova.virt.hardware [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1373.920018] env[62508]: DEBUG nova.virt.hardware [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1373.920018] env[62508]: DEBUG nova.virt.hardware [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1373.920265] env[62508]: DEBUG nova.virt.hardware [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 
tempest-InstanceActionsTestJSON-45223271-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1373.920265] env[62508]: DEBUG nova.virt.hardware [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1373.920265] env[62508]: DEBUG nova.virt.hardware [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1373.920265] env[62508]: DEBUG nova.virt.hardware [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1373.920265] env[62508]: DEBUG nova.virt.hardware [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1373.920642] env[62508]: DEBUG nova.virt.hardware [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1373.920974] env[62508]: DEBUG nova.virt.hardware [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1373.924018] env[62508]: DEBUG nova.virt.hardware [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1373.924018] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c976fd-64a3-49dc-91c4-25935c8703df {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.930699] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c30319-78c7-4d4b-bdc6-ad81183a295e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.950194] env[62508]: DEBUG nova.scheduler.client.report [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1374.009402] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775501, 'name': CreateVM_Task, 'duration_secs': 0.446413} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.010473] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1374.011031] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1374.011188] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.011509] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1374.012060] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30b13f14-3fda-4bdc-b712-996f5b986c03 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.017036] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Waiting for the task: (returnval){ [ 1374.017036] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52dc8c82-72d1-bb0b-67bf-236a7d19c976" [ 1374.017036] env[62508]: _type = "Task" [ 1374.017036] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.025025] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52dc8c82-72d1-bb0b-67bf-236a7d19c976, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.030034] env[62508]: DEBUG nova.compute.manager [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Received event network-changed-bcf067d3-85ff-44e6-bb5d-153c8b425360 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1374.030034] env[62508]: DEBUG nova.compute.manager [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Refreshing instance network info cache due to event network-changed-bcf067d3-85ff-44e6-bb5d-153c8b425360. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1374.030034] env[62508]: DEBUG oslo_concurrency.lockutils [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] Acquiring lock "refresh_cache-a226327d-11df-45e0-bef8-2337a0317c9e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1374.030331] env[62508]: DEBUG oslo_concurrency.lockutils [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] Acquired lock "refresh_cache-a226327d-11df-45e0-bef8-2337a0317c9e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.030331] env[62508]: DEBUG nova.network.neutron [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Refreshing network info cache for port bcf067d3-85ff-44e6-bb5d-153c8b425360 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1374.320931] env[62508]: DEBUG nova.compute.manager [req-3c94f412-05c4-4a48-a8ad-1b55b66c0978 req-29035167-ebf3-4650-bce7-504f15a3cf54 service nova] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Received event network-vif-plugged-d19e2f57-30f3-41cd-b87b-d8378a2a8cc0 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1374.321199] env[62508]: DEBUG oslo_concurrency.lockutils [req-3c94f412-05c4-4a48-a8ad-1b55b66c0978 req-29035167-ebf3-4650-bce7-504f15a3cf54 service nova] Acquiring lock "b297d642-88a7-4acc-a94d-e1cb7df81982-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1374.322996] env[62508]: DEBUG oslo_concurrency.lockutils [req-3c94f412-05c4-4a48-a8ad-1b55b66c0978 req-29035167-ebf3-4650-bce7-504f15a3cf54 service nova] Lock "b297d642-88a7-4acc-a94d-e1cb7df81982-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.322996] env[62508]: DEBUG oslo_concurrency.lockutils [req-3c94f412-05c4-4a48-a8ad-1b55b66c0978 req-29035167-ebf3-4650-bce7-504f15a3cf54 service nova] Lock "b297d642-88a7-4acc-a94d-e1cb7df81982-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.322996] env[62508]: DEBUG nova.compute.manager [req-3c94f412-05c4-4a48-a8ad-1b55b66c0978 
req-29035167-ebf3-4650-bce7-504f15a3cf54 service nova] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] No waiting events found dispatching network-vif-plugged-d19e2f57-30f3-41cd-b87b-d8378a2a8cc0 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1374.322996] env[62508]: WARNING nova.compute.manager [req-3c94f412-05c4-4a48-a8ad-1b55b66c0978 req-29035167-ebf3-4650-bce7-504f15a3cf54 service nova] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Received unexpected event network-vif-plugged-d19e2f57-30f3-41cd-b87b-d8378a2a8cc0 for instance with vm_state building and task_state spawning. [ 1374.322996] env[62508]: DEBUG nova.compute.manager [req-3c94f412-05c4-4a48-a8ad-1b55b66c0978 req-29035167-ebf3-4650-bce7-504f15a3cf54 service nova] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Received event network-changed-d19e2f57-30f3-41cd-b87b-d8378a2a8cc0 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1374.323355] env[62508]: DEBUG nova.compute.manager [req-3c94f412-05c4-4a48-a8ad-1b55b66c0978 req-29035167-ebf3-4650-bce7-504f15a3cf54 service nova] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Refreshing instance network info cache due to event network-changed-d19e2f57-30f3-41cd-b87b-d8378a2a8cc0. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1374.323355] env[62508]: DEBUG oslo_concurrency.lockutils [req-3c94f412-05c4-4a48-a8ad-1b55b66c0978 req-29035167-ebf3-4650-bce7-504f15a3cf54 service nova] Acquiring lock "refresh_cache-b297d642-88a7-4acc-a94d-e1cb7df81982" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1374.323355] env[62508]: DEBUG oslo_concurrency.lockutils [req-3c94f412-05c4-4a48-a8ad-1b55b66c0978 req-29035167-ebf3-4650-bce7-504f15a3cf54 service nova] Acquired lock "refresh_cache-b297d642-88a7-4acc-a94d-e1cb7df81982" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.323355] env[62508]: DEBUG nova.network.neutron [req-3c94f412-05c4-4a48-a8ad-1b55b66c0978 req-29035167-ebf3-4650-bce7-504f15a3cf54 service nova] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Refreshing network info cache for port d19e2f57-30f3-41cd-b87b-d8378a2a8cc0 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1374.379344] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529e6379-4f28-f804-586f-446c08ee5671, 'name': SearchDatastore_Task, 'duration_secs': 0.010431} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.380114] env[62508]: DEBUG oslo_concurrency.lockutils [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1374.380114] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a226327d-11df-45e0-bef8-2337a0317c9e/a226327d-11df-45e0-bef8-2337a0317c9e.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1374.380402] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a994b306-bb6d-4a2f-86fc-97e78d5817a3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.391091] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1374.391091] env[62508]: value = "task-1775502" [ 1374.391091] env[62508]: _type = "Task" [ 1374.391091] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.399670] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775502, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.456613] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.610s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.457363] env[62508]: DEBUG nova.compute.manager [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1374.460354] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 38.962s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.460530] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.460687] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1374.461084] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.016s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.462760] env[62508]: INFO nova.compute.claims [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1374.469421] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b671ebe9-2e0a-47d5-887d-d3be960c7cb5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.479662] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0374132a-5bbd-41a2-acaf-612041016acd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.494522] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed3a13a0-b961-4d31-8fc9-f929b86c8208 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.501893] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c22798e-7fd8-4465-8e8d-077ae6bf1339 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.533510] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180812MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1374.533681] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1374.546475] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52dc8c82-72d1-bb0b-67bf-236a7d19c976, 'name': SearchDatastore_Task, 'duration_secs': 0.010011} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.546771] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1374.547013] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1374.547316] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1374.547555] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.547787] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1374.548062] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba27a8da-cac6-43bd-a15d-6c8c6f6bf769 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.557137] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1374.557137] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None 
req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1374.557137] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3842dc9-5954-4be4-98aa-dfa925441ea1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.562009] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Waiting for the task: (returnval){ [ 1374.562009] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52da844f-0a96-8412-edb2-55e9ed877994" [ 1374.562009] env[62508]: _type = "Task" [ 1374.562009] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.570277] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52da844f-0a96-8412-edb2-55e9ed877994, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.904183] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775502, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.965880] env[62508]: DEBUG nova.network.neutron [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Updated VIF entry in instance network info cache for port bcf067d3-85ff-44e6-bb5d-153c8b425360. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1374.966340] env[62508]: DEBUG nova.network.neutron [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Updating instance_info_cache with network_info: [{"id": "bcf067d3-85ff-44e6-bb5d-153c8b425360", "address": "fa:16:3e:b9:ec:80", "network": {"id": "60ac9aa7-8f09-4ba8-b8bd-545df0c62d98", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-289553303-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d58abb4cdcb74100b7c81076c7642b6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcf067d3-85", "ovs_interfaceid": "bcf067d3-85ff-44e6-bb5d-153c8b425360", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1374.972121] env[62508]: DEBUG nova.compute.utils [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1374.978783] env[62508]: DEBUG nova.compute.manager [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1374.978783] env[62508]: DEBUG nova.network.neutron [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1374.979232] env[62508]: DEBUG oslo_concurrency.lockutils [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "38d294a9-2f51-438d-b942-a88e380a981f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1374.979465] env[62508]: DEBUG oslo_concurrency.lockutils [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "38d294a9-2f51-438d-b942-a88e380a981f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.074226] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52da844f-0a96-8412-edb2-55e9ed877994, 'name': SearchDatastore_Task, 'duration_secs': 0.00901} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.075102] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-379eb578-79cc-4c05-9dbf-84363e249a2e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.079384] env[62508]: DEBUG nova.policy [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df8b8a7ad1fe40adb3ff61fe8a3cfb3b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ae1e8f147289480aa4ecab1500a0e3cf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1375.083754] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Waiting for the task: (returnval){ [ 1375.083754] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525b185b-f263-97cf-eafb-a6bb4ae02b71" [ 1375.083754] env[62508]: _type = "Task" [ 1375.083754] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.094936] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525b185b-f263-97cf-eafb-a6bb4ae02b71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.143249] env[62508]: DEBUG nova.network.neutron [req-3c94f412-05c4-4a48-a8ad-1b55b66c0978 req-29035167-ebf3-4650-bce7-504f15a3cf54 service nova] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Updated VIF entry in instance network info cache for port d19e2f57-30f3-41cd-b87b-d8378a2a8cc0. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1375.143639] env[62508]: DEBUG nova.network.neutron [req-3c94f412-05c4-4a48-a8ad-1b55b66c0978 req-29035167-ebf3-4650-bce7-504f15a3cf54 service nova] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Updating instance_info_cache with network_info: [{"id": "d19e2f57-30f3-41cd-b87b-d8378a2a8cc0", "address": "fa:16:3e:51:eb:19", "network": {"id": "730326a2-16a7-44c2-84df-589271e97270", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-262903296-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b47a7c40d404f5780750a1d97276600", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd19e2f57-30", "ovs_interfaceid": "d19e2f57-30f3-41cd-b87b-d8378a2a8cc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1375.214414] env[62508]: DEBUG nova.network.neutron [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Successfully updated port: 214565f6-9a69-416b-9a71-6b98dcdf82a2 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1375.270924] env[62508]: DEBUG oslo_concurrency.lockutils [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Acquiring lock "42eb98a9-e341-4a17-9d76-2a2c37efc1a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1375.271206] env[62508]: DEBUG oslo_concurrency.lockutils [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Lock 
"42eb98a9-e341-4a17-9d76-2a2c37efc1a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.271442] env[62508]: DEBUG oslo_concurrency.lockutils [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Acquiring lock "42eb98a9-e341-4a17-9d76-2a2c37efc1a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1375.271647] env[62508]: DEBUG oslo_concurrency.lockutils [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Lock "42eb98a9-e341-4a17-9d76-2a2c37efc1a1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.271862] env[62508]: DEBUG oslo_concurrency.lockutils [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Lock "42eb98a9-e341-4a17-9d76-2a2c37efc1a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1375.274725] env[62508]: INFO nova.compute.manager [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Terminating instance [ 1375.277103] env[62508]: DEBUG nova.compute.manager [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1375.277289] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1375.280800] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f51be3e-f4cf-4691-9db0-843eed4c5197 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.291043] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1375.291305] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5f9e0a1-3844-4f6b-b4bb-aa04f2d05087 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.298034] env[62508]: DEBUG oslo_vmware.api [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Waiting for the task: (returnval){ [ 1375.298034] env[62508]: value = "task-1775503" [ 1375.298034] env[62508]: _type = "Task" [ 1375.298034] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.305839] env[62508]: DEBUG oslo_vmware.api [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775503, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.402117] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775502, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.545199} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.402379] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a226327d-11df-45e0-bef8-2337a0317c9e/a226327d-11df-45e0-bef8-2337a0317c9e.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1375.402696] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1375.403127] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4d673c3b-a1a2-4fce-a1f5-74c3236f8a69 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.409414] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1375.409414] env[62508]: value = "task-1775504" [ 1375.409414] env[62508]: _type = "Task" [ 1375.409414] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.418609] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775504, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.477449] env[62508]: DEBUG oslo_concurrency.lockutils [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] Releasing lock "refresh_cache-a226327d-11df-45e0-bef8-2337a0317c9e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1375.477789] env[62508]: DEBUG nova.compute.manager [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Received event network-changed-987ac3c1-9f91-4672-9ca9-339fd8ad1dfd {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1375.477969] env[62508]: DEBUG nova.compute.manager [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Refreshing instance network info cache due to event network-changed-987ac3c1-9f91-4672-9ca9-339fd8ad1dfd. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1375.478208] env[62508]: DEBUG oslo_concurrency.lockutils [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] Acquiring lock "refresh_cache-ee99ff4d-9996-4cfa-b038-7b19aef27438" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1375.478353] env[62508]: DEBUG oslo_concurrency.lockutils [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] Acquired lock "refresh_cache-ee99ff4d-9996-4cfa-b038-7b19aef27438" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1375.478516] env[62508]: DEBUG nova.network.neutron [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Refreshing network info cache for port 987ac3c1-9f91-4672-9ca9-339fd8ad1dfd {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1375.479930] env[62508]: DEBUG nova.compute.manager [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1375.598090] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525b185b-f263-97cf-eafb-a6bb4ae02b71, 'name': SearchDatastore_Task, 'duration_secs': 0.017905} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.598961] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1375.598961] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] b297d642-88a7-4acc-a94d-e1cb7df81982/b297d642-88a7-4acc-a94d-e1cb7df81982.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1375.599331] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b633277e-9b68-4dab-8d5e-63f89b12309d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.608501] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Waiting for the task: (returnval){ [ 1375.608501] env[62508]: value = "task-1775505" [ 1375.608501] env[62508]: _type = "Task" [ 1375.608501] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.624172] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': task-1775505, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.646901] env[62508]: DEBUG oslo_concurrency.lockutils [req-3c94f412-05c4-4a48-a8ad-1b55b66c0978 req-29035167-ebf3-4650-bce7-504f15a3cf54 service nova] Releasing lock "refresh_cache-b297d642-88a7-4acc-a94d-e1cb7df81982" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1375.717977] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Acquiring lock "refresh_cache-2f7b7109-0ced-4ea4-8dde-608655f2b3ab" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1375.718159] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Acquired lock "refresh_cache-2f7b7109-0ced-4ea4-8dde-608655f2b3ab" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1375.718319] env[62508]: DEBUG nova.network.neutron [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1375.814182] env[62508]: DEBUG oslo_vmware.api [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775503, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.885261] env[62508]: DEBUG nova.network.neutron [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Successfully created port: 93c4291d-0197-4fb0-9a10-3b95a4a17e60 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1375.919574] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775504, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.229079} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.919574] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1375.920391] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8120407d-ad76-4723-9d7b-51bb092ffbd6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.946322] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] a226327d-11df-45e0-bef8-2337a0317c9e/a226327d-11df-45e0-bef8-2337a0317c9e.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1375.949514] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a177785d-7cd2-452a-a7f5-e13c4da32d91 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.971730] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1375.971730] env[62508]: value = "task-1775506" [ 1375.971730] env[62508]: _type = "Task" [ 1375.971730] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.980545] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775506, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.999174] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4640a817-12eb-49e6-a46c-4c180b401ca4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.010319] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a739c32-a1ad-4416-aec7-5a01e3e90ce1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.047434] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db275365-a208-48cb-bc23-e0106f72696c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.059402] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf041be-0677-4b22-badb-471f749e3149 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.078438] env[62508]: DEBUG nova.compute.provider_tree [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1376.120603] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': task-1775505, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.257370] env[62508]: DEBUG nova.network.neutron [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1376.311045] env[62508]: DEBUG oslo_vmware.api [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775503, 'name': PowerOffVM_Task, 'duration_secs': 0.528544} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.311397] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1376.311572] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1376.311860] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a65ed4aa-ad07-441e-96fe-7292b85bc089 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.471536] env[62508]: DEBUG nova.network.neutron [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Updating instance_info_cache with network_info: [{"id": "214565f6-9a69-416b-9a71-6b98dcdf82a2", "address": "fa:16:3e:53:8f:78", "network": {"id": "6de1c787-ad5d-4886-aba8-aef3c43f9b0e", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-288908091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "259e35282a6d43778c432bcce94bd21d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap214565f6-9a", "ovs_interfaceid": "214565f6-9a69-416b-9a71-6b98dcdf82a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.484380] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775506, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.493830] env[62508]: DEBUG nova.compute.manager [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1376.497244] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1376.497441] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1376.497618] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Deleting the datastore file [datastore1] 42eb98a9-e341-4a17-9d76-2a2c37efc1a1 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1376.498105] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b8108ffb-3c27-4eb1-8109-d8789b68199f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.507555] env[62508]: DEBUG oslo_vmware.api [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Waiting for the task: (returnval){ [ 1376.507555] env[62508]: value = "task-1775508" [ 1376.507555] env[62508]: _type = "Task" [ 1376.507555] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.516421] env[62508]: DEBUG oslo_vmware.api [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775508, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.523890] env[62508]: DEBUG nova.network.neutron [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Updated VIF entry in instance network info cache for port 987ac3c1-9f91-4672-9ca9-339fd8ad1dfd. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1376.524723] env[62508]: DEBUG nova.network.neutron [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Updating instance_info_cache with network_info: [{"id": "987ac3c1-9f91-4672-9ca9-339fd8ad1dfd", "address": "fa:16:3e:0d:65:46", "network": {"id": "93471a4c-8043-41bc-9a6c-49116ad50d1a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2104534686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d188021b90b4a7cb04521e090d0c1c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap987ac3c1-9f", "ovs_interfaceid": "987ac3c1-9f91-4672-9ca9-339fd8ad1dfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.535958] env[62508]: DEBUG nova.virt.hardware [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1376.536294] env[62508]: DEBUG nova.virt.hardware [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1376.536467] env[62508]: DEBUG nova.virt.hardware [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1376.536660] env[62508]: DEBUG nova.virt.hardware [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1376.537048] env[62508]: DEBUG nova.virt.hardware [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1376.537048] env[62508]: DEBUG nova.virt.hardware [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1376.537304] env[62508]: DEBUG nova.virt.hardware [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1376.537506] env[62508]: DEBUG nova.virt.hardware [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1376.537791] env[62508]: DEBUG nova.virt.hardware [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1376.538056] env[62508]: DEBUG nova.virt.hardware [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1376.538260] env[62508]: DEBUG nova.virt.hardware [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1376.539223] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc9d589-18b4-4856-b395-42df82b42c97 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.548739] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee53a06-bc98-4e7d-b450-810150a5b5e1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.570569] env[62508]: DEBUG nova.network.neutron [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Successfully created port: fbbd6f2c-037f-4a48-ad02-f3292bd93802 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1376.614422] env[62508]: ERROR nova.scheduler.client.report [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 
tempest-DeleteServersAdminTestJSON-1477972896-project-member] [req-df561456-df5d-410d-b6ee-e1b07d9919d4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-df561456-df5d-410d-b6ee-e1b07d9919d4"}]} [ 1376.623715] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': task-1775505, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534805} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.624190] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] b297d642-88a7-4acc-a94d-e1cb7df81982/b297d642-88a7-4acc-a94d-e1cb7df81982.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1376.624569] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1376.624931] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e060ce4d-e983-45bf-8c41-1b2dae448d2c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.631552] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Waiting for the task: (returnval){ [ 1376.631552] env[62508]: value = "task-1775509" [ 1376.631552] env[62508]: _type = "Task" [ 1376.631552] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.641952] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': task-1775509, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.643398] env[62508]: DEBUG nova.scheduler.client.report [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1376.670046] env[62508]: DEBUG nova.scheduler.client.report [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1376.670046] env[62508]: DEBUG nova.compute.provider_tree [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1376.686428] env[62508]: DEBUG nova.scheduler.client.report [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1376.714099] env[62508]: DEBUG nova.scheduler.client.report [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1376.746333] env[62508]: DEBUG nova.compute.manager [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Received event network-vif-plugged-214565f6-9a69-416b-9a71-6b98dcdf82a2 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1376.746670] env[62508]: DEBUG oslo_concurrency.lockutils [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] Acquiring lock 
"2f7b7109-0ced-4ea4-8dde-608655f2b3ab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.746971] env[62508]: DEBUG oslo_concurrency.lockutils [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] Lock "2f7b7109-0ced-4ea4-8dde-608655f2b3ab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.747904] env[62508]: DEBUG oslo_concurrency.lockutils [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] Lock "2f7b7109-0ced-4ea4-8dde-608655f2b3ab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.747904] env[62508]: DEBUG nova.compute.manager [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] No waiting events found dispatching network-vif-plugged-214565f6-9a69-416b-9a71-6b98dcdf82a2 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1376.747904] env[62508]: WARNING nova.compute.manager [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Received unexpected event network-vif-plugged-214565f6-9a69-416b-9a71-6b98dcdf82a2 for instance with vm_state building and task_state spawning. [ 1376.747904] env[62508]: DEBUG nova.compute.manager [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Received event network-changed-214565f6-9a69-416b-9a71-6b98dcdf82a2 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1376.747904] env[62508]: DEBUG nova.compute.manager [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Refreshing instance network info cache due to event network-changed-214565f6-9a69-416b-9a71-6b98dcdf82a2. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1376.748437] env[62508]: DEBUG oslo_concurrency.lockutils [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] Acquiring lock "refresh_cache-2f7b7109-0ced-4ea4-8dde-608655f2b3ab" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.970231] env[62508]: DEBUG nova.network.neutron [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Successfully created port: 39708f16-52cb-48ca-b3e5-da465cea2af7 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1376.982139] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Releasing lock "refresh_cache-2f7b7109-0ced-4ea4-8dde-608655f2b3ab" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.982358] env[62508]: DEBUG nova.compute.manager [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Instance network_info: |[{"id": "214565f6-9a69-416b-9a71-6b98dcdf82a2", "address": "fa:16:3e:53:8f:78", "network": {"id": "6de1c787-ad5d-4886-aba8-aef3c43f9b0e", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-288908091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "259e35282a6d43778c432bcce94bd21d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap214565f6-9a", "ovs_interfaceid": "214565f6-9a69-416b-9a71-6b98dcdf82a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1376.982668] env[62508]: DEBUG oslo_concurrency.lockutils [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] Acquired lock "refresh_cache-2f7b7109-0ced-4ea4-8dde-608655f2b3ab" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.982803] env[62508]: DEBUG nova.network.neutron [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Refreshing network info cache for port 214565f6-9a69-416b-9a71-6b98dcdf82a2 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1376.984995] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be 
tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:8f:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '842f738f-eaa4-4444-a9bf-90d2b533184c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '214565f6-9a69-416b-9a71-6b98dcdf82a2', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1376.992405] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Creating folder: Project (259e35282a6d43778c432bcce94bd21d). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1376.996624] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b35bb5dd-d6aa-4bca-861a-9ba75d9aecad {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.999554] env[62508]: DEBUG nova.compute.manager [req-1a3c9f28-d070-4903-b2d9-0525b65acae6 req-2a5a51c8-8129-47ea-bb7d-5c7590fb597d service nova] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Received event network-changed-129ec0b1-e45e-4868-b60c-d9b307a0d56c {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1376.999736] env[62508]: DEBUG nova.compute.manager [req-1a3c9f28-d070-4903-b2d9-0525b65acae6 req-2a5a51c8-8129-47ea-bb7d-5c7590fb597d service nova] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Refreshing instance network info cache due to event network-changed-129ec0b1-e45e-4868-b60c-d9b307a0d56c. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1376.999944] env[62508]: DEBUG oslo_concurrency.lockutils [req-1a3c9f28-d070-4903-b2d9-0525b65acae6 req-2a5a51c8-8129-47ea-bb7d-5c7590fb597d service nova] Acquiring lock "refresh_cache-73452964-d690-451d-98c3-fba3c3301c6d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1377.000096] env[62508]: DEBUG oslo_concurrency.lockutils [req-1a3c9f28-d070-4903-b2d9-0525b65acae6 req-2a5a51c8-8129-47ea-bb7d-5c7590fb597d service nova] Acquired lock "refresh_cache-73452964-d690-451d-98c3-fba3c3301c6d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1377.000255] env[62508]: DEBUG nova.network.neutron [req-1a3c9f28-d070-4903-b2d9-0525b65acae6 req-2a5a51c8-8129-47ea-bb7d-5c7590fb597d service nova] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Refreshing network info cache for port 129ec0b1-e45e-4868-b60c-d9b307a0d56c {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1377.005810] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775506, 'name': ReconfigVM_Task, 'duration_secs': 0.882777} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.006496] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Reconfigured VM instance instance-00000017 to attach disk [datastore1] a226327d-11df-45e0-bef8-2337a0317c9e/a226327d-11df-45e0-bef8-2337a0317c9e.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1377.007350] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-97f77222-ae54-4aef-9b03-f2fe2c0260c7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.022286] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1377.022286] env[62508]: value = "task-1775511" [ 1377.022286] env[62508]: _type = "Task" [ 1377.022286] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.022523] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Created folder: Project (259e35282a6d43778c432bcce94bd21d) in parent group-v368536. [ 1377.022691] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Creating folder: Instances. Parent ref: group-v368605. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1377.028509] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61894e67-2c70-4da0-9941-0c4ebf88c6c0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.028509] env[62508]: DEBUG oslo_vmware.api [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Task: {'id': task-1775508, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.413482} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.034319] env[62508]: DEBUG oslo_concurrency.lockutils [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] Releasing lock "refresh_cache-ee99ff4d-9996-4cfa-b038-7b19aef27438" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1377.034319] env[62508]: DEBUG nova.compute.manager [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Received event network-changed-987ac3c1-9f91-4672-9ca9-339fd8ad1dfd {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1377.034319] env[62508]: DEBUG nova.compute.manager [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Refreshing instance network info cache due to event network-changed-987ac3c1-9f91-4672-9ca9-339fd8ad1dfd. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1377.034475] env[62508]: DEBUG oslo_concurrency.lockutils [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] Acquiring lock "refresh_cache-ee99ff4d-9996-4cfa-b038-7b19aef27438" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1377.034618] env[62508]: DEBUG oslo_concurrency.lockutils [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] Acquired lock "refresh_cache-ee99ff4d-9996-4cfa-b038-7b19aef27438" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1377.034796] env[62508]: DEBUG nova.network.neutron [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Refreshing network info cache for port 987ac3c1-9f91-4672-9ca9-339fd8ad1dfd {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1377.035962] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1377.036130] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1377.036302] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1377.036464] env[62508]: INFO nova.compute.manager [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Took 1.76 
seconds to destroy the instance on the hypervisor. [ 1377.036706] env[62508]: DEBUG oslo.service.loopingcall [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1377.037748] env[62508]: DEBUG nova.compute.manager [-] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1377.037748] env[62508]: DEBUG nova.network.neutron [-] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1377.045098] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775511, 'name': Rename_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.046701] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Created folder: Instances in parent group-v368605. [ 1377.046853] env[62508]: DEBUG oslo.service.loopingcall [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1377.047051] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1377.047252] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3634aaac-00c5-4708-881e-5acd09042984 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.070324] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1377.070324] env[62508]: value = "task-1775513" [ 1377.070324] env[62508]: _type = "Task" [ 1377.070324] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.078973] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775513, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.145290] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': task-1775509, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074308} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.147484] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1377.148398] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba447539-3439-4d82-8070-6571a6efdf6a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.174423] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] b297d642-88a7-4acc-a94d-e1cb7df81982/b297d642-88a7-4acc-a94d-e1cb7df81982.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1377.176165] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b144def8-38e4-4fe0-99ea-2d4be6277743 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.202144] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Waiting for the task: (returnval){ [ 1377.202144] env[62508]: value = "task-1775514" [ 1377.202144] env[62508]: _type = "Task" [ 1377.202144] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.213274] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': task-1775514, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.264936] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30db3f60-85ce-4d9c-99bc-e2c769947e6f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.272676] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992d0503-c369-4c34-b721-524430fe4347 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.308354] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68cb3cb4-e277-459e-a2ff-c314d16dcae9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.316893] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-799c397c-4a14-4776-b71e-04ba34d3324f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.334321] env[62508]: DEBUG nova.compute.provider_tree [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1377.537088] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775511, 'name': Rename_Task, 'duration_secs': 0.148601} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.537418] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1377.539718] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-200b8736-9d8f-4ad6-9c6d-4a46b0a7b839 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.547020] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1377.547020] env[62508]: value = "task-1775515" [ 1377.547020] env[62508]: _type = "Task" [ 1377.547020] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.555934] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775515, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.579815] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775513, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.696803] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquiring lock "e652e59f-9432-41cf-b4a5-0f5cf649b24e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.697130] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "e652e59f-9432-41cf-b4a5-0f5cf649b24e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.697362] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquiring lock "e652e59f-9432-41cf-b4a5-0f5cf649b24e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.697548] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "e652e59f-9432-41cf-b4a5-0f5cf649b24e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.697728] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "e652e59f-9432-41cf-b4a5-0f5cf649b24e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.700011] env[62508]: INFO nova.compute.manager [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Terminating instance [ 1377.704011] env[62508]: DEBUG nova.compute.manager [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 
tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1377.704477] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1377.709632] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4d2ead-61bf-414f-bd33-aa5a4a03eed3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.718911] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': task-1775514, 'name': ReconfigVM_Task, 'duration_secs': 0.298978} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.721540] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Reconfigured VM instance instance-00000018 to attach disk [datastore1] b297d642-88a7-4acc-a94d-e1cb7df81982/b297d642-88a7-4acc-a94d-e1cb7df81982.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1377.723012] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1377.723282] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-407b16a4-daed-4b80-bbf8-499329a264fa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.725345] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ceb93b7f-5126-4e28-8ddb-a7e54e5266a3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.732090] env[62508]: DEBUG oslo_vmware.api [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for the task: (returnval){ [ 1377.732090] env[62508]: value = "task-1775517" [ 1377.732090] env[62508]: _type = "Task" [ 1377.732090] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.735646] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Waiting for the task: (returnval){ [ 1377.735646] env[62508]: value = "task-1775516" [ 1377.735646] env[62508]: _type = "Task" [ 1377.735646] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.755865] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': task-1775516, 'name': Rename_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.759809] env[62508]: DEBUG oslo_vmware.api [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775517, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.862945] env[62508]: DEBUG nova.network.neutron [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Updated VIF entry in instance network info cache for port 214565f6-9a69-416b-9a71-6b98dcdf82a2. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1377.863395] env[62508]: DEBUG nova.network.neutron [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Updating instance_info_cache with network_info: [{"id": "214565f6-9a69-416b-9a71-6b98dcdf82a2", "address": "fa:16:3e:53:8f:78", "network": {"id": "6de1c787-ad5d-4886-aba8-aef3c43f9b0e", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-288908091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "259e35282a6d43778c432bcce94bd21d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap214565f6-9a", "ovs_interfaceid": "214565f6-9a69-416b-9a71-6b98dcdf82a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1377.889985] env[62508]: DEBUG nova.scheduler.client.report [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with 
generation 52 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1377.890335] env[62508]: DEBUG nova.compute.provider_tree [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 52 to 53 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1377.890611] env[62508]: DEBUG nova.compute.provider_tree [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1377.932253] env[62508]: DEBUG nova.network.neutron [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Updated VIF entry in instance network info cache for port 987ac3c1-9f91-4672-9ca9-339fd8ad1dfd. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1377.932612] env[62508]: DEBUG nova.network.neutron [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Updating instance_info_cache with network_info: [{"id": "987ac3c1-9f91-4672-9ca9-339fd8ad1dfd", "address": "fa:16:3e:0d:65:46", "network": {"id": "93471a4c-8043-41bc-9a6c-49116ad50d1a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2104534686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d188021b90b4a7cb04521e090d0c1c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap987ac3c1-9f", "ovs_interfaceid": "987ac3c1-9f91-4672-9ca9-339fd8ad1dfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1377.986022] env[62508]: DEBUG nova.network.neutron [req-1a3c9f28-d070-4903-b2d9-0525b65acae6 req-2a5a51c8-8129-47ea-bb7d-5c7590fb597d service nova] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Updated VIF entry in instance network info cache for port 129ec0b1-e45e-4868-b60c-d9b307a0d56c. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1377.986393] env[62508]: DEBUG nova.network.neutron [req-1a3c9f28-d070-4903-b2d9-0525b65acae6 req-2a5a51c8-8129-47ea-bb7d-5c7590fb597d service nova] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Updating instance_info_cache with network_info: [{"id": "129ec0b1-e45e-4868-b60c-d9b307a0d56c", "address": "fa:16:3e:58:64:15", "network": {"id": "ac7c30e4-6546-4d38-864a-242d0169a576", "bridge": "br-int", "label": "tempest-ServersTestJSON-108451886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1af268219a1496fb491ea6353ff551d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4e52d8a-b086-4333-a5a1-938680a2d2bd", "external-id": "nsx-vlan-transportzone-973", "segmentation_id": 973, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap129ec0b1-e4", "ovs_interfaceid": "129ec0b1-e45e-4868-b60c-d9b307a0d56c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1378.056474] env[62508]: DEBUG oslo_vmware.api [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775515, 'name': PowerOnVM_Task, 'duration_secs': 0.489754} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.056730] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1378.056937] env[62508]: INFO nova.compute.manager [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Took 10.83 seconds to spawn the instance on the hypervisor. [ 1378.057140] env[62508]: DEBUG nova.compute.manager [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1378.057951] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-467967df-fc45-489b-be62-5b4d25f8a57a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.079095] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775513, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.126332] env[62508]: DEBUG nova.network.neutron [-] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1378.245579] env[62508]: DEBUG oslo_vmware.api [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775517, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.253660] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': task-1775516, 'name': Rename_Task, 'duration_secs': 0.149738} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.254289] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1378.254620] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e349b280-9b2d-4abf-9a81-316f260d01ab {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.261466] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Waiting for the task: (returnval){ [ 1378.261466] env[62508]: value = "task-1775518" [ 1378.261466] env[62508]: _type = "Task" [ 1378.261466] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.269193] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': task-1775518, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.366549] env[62508]: DEBUG oslo_concurrency.lockutils [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] Releasing lock "refresh_cache-2f7b7109-0ced-4ea4-8dde-608655f2b3ab" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1378.367915] env[62508]: DEBUG nova.compute.manager [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Received event network-changed-c1117e3d-5c3e-4513-832b-12ce45699cab {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1378.368550] env[62508]: DEBUG nova.compute.manager [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Refreshing instance network info cache due to event network-changed-c1117e3d-5c3e-4513-832b-12ce45699cab. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1378.369032] env[62508]: DEBUG oslo_concurrency.lockutils [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] Acquiring lock "refresh_cache-e652e59f-9432-41cf-b4a5-0f5cf649b24e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1378.369320] env[62508]: DEBUG oslo_concurrency.lockutils [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] Acquired lock "refresh_cache-e652e59f-9432-41cf-b4a5-0f5cf649b24e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1378.369577] env[62508]: DEBUG nova.network.neutron [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Refreshing network info cache for port c1117e3d-5c3e-4513-832b-12ce45699cab {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1378.397854] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.936s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.398453] env[62508]: DEBUG nova.compute.manager [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1378.402453] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.295s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.402453] env[62508]: DEBUG nova.objects.instance [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lazy-loading 'resources' on Instance uuid 575ea3dc-850d-4078-8678-41b3c40a4c27 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1378.434919] env[62508]: DEBUG oslo_concurrency.lockutils [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] Releasing lock "refresh_cache-ee99ff4d-9996-4cfa-b038-7b19aef27438" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1378.435197] env[62508]: DEBUG nova.compute.manager [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Received event network-changed-c1117e3d-5c3e-4513-832b-12ce45699cab {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1378.435365] env[62508]: DEBUG nova.compute.manager [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Refreshing instance network info cache due to event network-changed-c1117e3d-5c3e-4513-832b-12ce45699cab. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1378.435562] env[62508]: DEBUG oslo_concurrency.lockutils [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] Acquiring lock "refresh_cache-e652e59f-9432-41cf-b4a5-0f5cf649b24e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1378.489583] env[62508]: DEBUG oslo_concurrency.lockutils [req-1a3c9f28-d070-4903-b2d9-0525b65acae6 req-2a5a51c8-8129-47ea-bb7d-5c7590fb597d service nova] Releasing lock "refresh_cache-73452964-d690-451d-98c3-fba3c3301c6d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1378.580722] env[62508]: INFO nova.compute.manager [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Took 55.31 seconds to build instance. [ 1378.584676] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775513, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.629076] env[62508]: INFO nova.compute.manager [-] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Took 1.59 seconds to deallocate network for instance. 
[ 1378.746781] env[62508]: DEBUG oslo_vmware.api [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775517, 'name': PowerOffVM_Task, 'duration_secs': 0.922287} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.746949] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1378.746949] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1378.747323] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7efd48ba-3b7f-496a-bb15-39544a191bde {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.775128] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': task-1775518, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.828107] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1378.828337] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1378.828519] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Deleting the datastore file [datastore1] e652e59f-9432-41cf-b4a5-0f5cf649b24e {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1378.828792] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f6123b1f-4c96-40f8-9500-901973d70dc8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.835091] env[62508]: DEBUG oslo_vmware.api [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for the task: (returnval){ [ 1378.835091] env[62508]: value = "task-1775520" [ 
1378.835091] env[62508]: _type = "Task" [ 1378.835091] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.842866] env[62508]: DEBUG oslo_vmware.api [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775520, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.904421] env[62508]: DEBUG nova.compute.utils [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1378.906525] env[62508]: DEBUG nova.compute.manager [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1378.906525] env[62508]: DEBUG nova.network.neutron [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1378.921066] env[62508]: DEBUG nova.network.neutron [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Successfully updated port: 93c4291d-0197-4fb0-9a10-3b95a4a17e60 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1378.983346] env[62508]: DEBUG nova.policy [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f551d4352d914c58a846380ffe016437', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa9fc12f40cf45729330d52f46688f41', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1379.087798] env[62508]: DEBUG oslo_concurrency.lockutils [None req-40d915d7-0f99-4441-852c-8e128865bb33 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lock "a226327d-11df-45e0-bef8-2337a0317c9e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.248s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.088028] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775513, 'name': CreateVM_Task, 'duration_secs': 1.521236} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.089182] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1379.089798] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1379.090246] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.090311] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1379.090548] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49f56dca-4dd5-417e-abac-32b59246a904 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.101651] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Waiting for the task: (returnval){ [ 1379.101651] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ec5195-41b6-fdcf-73ca-1c2d04a3ce0f" [ 1379.101651] env[62508]: _type = "Task" [ 1379.101651] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.110200] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ec5195-41b6-fdcf-73ca-1c2d04a3ce0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.137227] env[62508]: DEBUG oslo_concurrency.lockutils [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.214025] env[62508]: DEBUG nova.network.neutron [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Updated VIF entry in instance network info cache for port c1117e3d-5c3e-4513-832b-12ce45699cab. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1379.214437] env[62508]: DEBUG nova.network.neutron [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Updating instance_info_cache with network_info: [{"id": "c1117e3d-5c3e-4513-832b-12ce45699cab", "address": "fa:16:3e:bf:62:e4", "network": {"id": "93471a4c-8043-41bc-9a6c-49116ad50d1a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2104534686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d188021b90b4a7cb04521e090d0c1c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1117e3d-5c", "ovs_interfaceid": "c1117e3d-5c3e-4513-832b-12ce45699cab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.253582] env[62508]: DEBUG nova.compute.manager [req-1b1af74e-a99e-49cf-af16-ec3f275146fa req-d324c994-8ee1-41e3-96ef-086eec647379 service nova] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Received event network-vif-deleted-119423da-6eda-483b-b0aa-050fdbcd42e6 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1379.253582] env[62508]: DEBUG nova.compute.manager [req-1b1af74e-a99e-49cf-af16-ec3f275146fa req-d324c994-8ee1-41e3-96ef-086eec647379 service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Received event network-changed-987ac3c1-9f91-4672-9ca9-339fd8ad1dfd {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1379.253712] env[62508]: DEBUG nova.compute.manager [req-1b1af74e-a99e-49cf-af16-ec3f275146fa req-d324c994-8ee1-41e3-96ef-086eec647379 service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Refreshing instance network info cache due to event network-changed-987ac3c1-9f91-4672-9ca9-339fd8ad1dfd. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1379.253921] env[62508]: DEBUG oslo_concurrency.lockutils [req-1b1af74e-a99e-49cf-af16-ec3f275146fa req-d324c994-8ee1-41e3-96ef-086eec647379 service nova] Acquiring lock "refresh_cache-ee99ff4d-9996-4cfa-b038-7b19aef27438" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1379.254110] env[62508]: DEBUG oslo_concurrency.lockutils [req-1b1af74e-a99e-49cf-af16-ec3f275146fa req-d324c994-8ee1-41e3-96ef-086eec647379 service nova] Acquired lock "refresh_cache-ee99ff4d-9996-4cfa-b038-7b19aef27438" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.254231] env[62508]: DEBUG nova.network.neutron [req-1b1af74e-a99e-49cf-af16-ec3f275146fa req-d324c994-8ee1-41e3-96ef-086eec647379 service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Refreshing network info cache for port 987ac3c1-9f91-4672-9ca9-339fd8ad1dfd {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1379.274128] env[62508]: DEBUG oslo_vmware.api [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': task-1775518, 'name': PowerOnVM_Task, 'duration_secs': 0.782583} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.277436] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1379.277436] env[62508]: INFO nova.compute.manager [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Took 9.42 seconds to spawn the instance on the hypervisor. 
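The "Waiting for the task ... progress is N% ... completed successfully" entries throughout this section come from polling vCenter task state until it reaches a terminal state. A minimal sketch of such a poll loop follows; it is not the oslo.vmware implementation, and the get_task_info callable and the dict shape it returns are assumptions made only for illustration.

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        # get_task_info is a caller-supplied callable returning a dict such as
        # {'key': 'task-1775518', 'state': 'running', 'progress': 42}
        while True:
            info = get_task_info()
            if info.get('state') == 'success':
                return info.get('result')
            if info.get('state') == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            # Mirrors the "Task: {...} progress is N%" debug lines above.
            print("Task %s progress is %s%%" % (info.get('key'), info.get('progress', 0)))
            time.sleep(poll_interval)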
[ 1379.277436] env[62508]: DEBUG nova.compute.manager [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1379.278327] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c6b1b8-8fc9-4cc4-a3ab-22826da27dd7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.339892] env[62508]: DEBUG nova.network.neutron [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Successfully created port: 98a8b500-40eb-420e-8812-e8780d0c7c17 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1379.349592] env[62508]: DEBUG oslo_vmware.api [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775520, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.284708} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.349877] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1379.350068] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1379.350495] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1379.350495] env[62508]: INFO nova.compute.manager [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1379.351129] env[62508]: DEBUG oslo.service.loopingcall [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1379.351327] env[62508]: DEBUG nova.compute.manager [-] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1379.351419] env[62508]: DEBUG nova.network.neutron [-] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1379.413027] env[62508]: DEBUG nova.compute.manager [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1379.422852] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b584361-1570-4793-998b-754bf10729b3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.434702] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d4ded2-334d-4ee5-af1f-9d12528d72a0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.470920] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2a4001-b1e2-4235-834a-18ac8b3ca804 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.480031] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29be87a-6720-4a4c-b972-143014fef9bb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.495795] env[62508]: DEBUG nova.compute.provider_tree [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1379.600071] env[62508]: DEBUG nova.compute.manager [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1379.612563] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ec5195-41b6-fdcf-73ca-1c2d04a3ce0f, 'name': SearchDatastore_Task, 'duration_secs': 0.02287} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.612888] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1379.613425] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1379.615150] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1379.615150] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.615150] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1379.615150] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bdc52414-98e2-49b0-acd4-d805c8bfffd5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.626769] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1379.626965] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1379.628063] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0111c79b-f1aa-4edb-8801-4f741b6ab706 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.634241] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Waiting for the task: (returnval){ [ 1379.634241] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5212eb3e-af76-5496-ce89-8350b1d5d72d" [ 1379.634241] env[62508]: _type = "Task" [ 1379.634241] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.642633] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5212eb3e-af76-5496-ce89-8350b1d5d72d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.717713] env[62508]: DEBUG oslo_concurrency.lockutils [req-2d6b2373-b476-4abc-8eb1-e7410f2607ce req-d8763077-616f-40af-b688-48cacf620211 service nova] Releasing lock "refresh_cache-e652e59f-9432-41cf-b4a5-0f5cf649b24e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1379.718520] env[62508]: DEBUG oslo_concurrency.lockutils [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] Acquired lock "refresh_cache-e652e59f-9432-41cf-b4a5-0f5cf649b24e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.718838] env[62508]: DEBUG nova.network.neutron [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Refreshing network info cache for port c1117e3d-5c3e-4513-832b-12ce45699cab {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1379.801043] env[62508]: INFO nova.compute.manager [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Took 49.24 seconds to build instance. 
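The "Acquiring lock / Acquired lock / Releasing lock 'refresh_cache-<uuid>'" entries in this section show per-instance locks guarding the network info cache refresh via oslo.concurrency. A minimal sketch of that pattern, assuming oslo.concurrency's lockutils.lock context manager; refresh_instance_nw_cache and refresh_fn are hypothetical names used only for illustration and are not Nova's actual code path.

    from oslo_concurrency import lockutils

    def refresh_instance_nw_cache(instance_uuid, refresh_fn):
        # Lock name matches the "refresh_cache-<uuid>" locks seen in the log;
        # refresh_fn stands in for the Neutron network info cache refresh.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return refresh_fn(instance_uuid)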
[ 1379.859849] env[62508]: INFO nova.compute.manager [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Rescuing [ 1379.860103] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "refresh_cache-a226327d-11df-45e0-bef8-2337a0317c9e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1379.860264] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquired lock "refresh_cache-a226327d-11df-45e0-bef8-2337a0317c9e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.860429] env[62508]: DEBUG nova.network.neutron [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1380.003028] env[62508]: DEBUG nova.scheduler.client.report [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1380.145597] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5212eb3e-af76-5496-ce89-8350b1d5d72d, 'name': SearchDatastore_Task, 'duration_secs': 0.015476} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.146624] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.147417] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05e87076-c792-4e6c-ba06-6a85c5fedad4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.153412] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Waiting for the task: (returnval){ [ 1380.153412] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525e9d55-68e6-ee05-01ba-f6cb5d45246a" [ 1380.153412] env[62508]: _type = "Task" [ 1380.153412] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.161654] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525e9d55-68e6-ee05-01ba-f6cb5d45246a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.239867] env[62508]: INFO nova.network.neutron [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Port c1117e3d-5c3e-4513-832b-12ce45699cab from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1380.239867] env[62508]: DEBUG nova.network.neutron [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1380.241291] env[62508]: DEBUG nova.network.neutron [req-1b1af74e-a99e-49cf-af16-ec3f275146fa req-d324c994-8ee1-41e3-96ef-086eec647379 service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Updated VIF entry in instance network info cache for port 987ac3c1-9f91-4672-9ca9-339fd8ad1dfd. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1380.241606] env[62508]: DEBUG nova.network.neutron [req-1b1af74e-a99e-49cf-af16-ec3f275146fa req-d324c994-8ee1-41e3-96ef-086eec647379 service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Updating instance_info_cache with network_info: [{"id": "987ac3c1-9f91-4672-9ca9-339fd8ad1dfd", "address": "fa:16:3e:0d:65:46", "network": {"id": "93471a4c-8043-41bc-9a6c-49116ad50d1a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2104534686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d188021b90b4a7cb04521e090d0c1c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap987ac3c1-9f", "ovs_interfaceid": "987ac3c1-9f91-4672-9ca9-339fd8ad1dfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1380.306920] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73eff98f-32a5-4aa7-872a-362a9a859812 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Lock "b297d642-88a7-4acc-a94d-e1cb7df81982" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.935s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1380.340028] env[62508]: DEBUG nova.network.neutron [-] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1380.423639] env[62508]: DEBUG nova.compute.manager [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1380.459652] env[62508]: DEBUG nova.virt.hardware [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1380.459891] env[62508]: DEBUG nova.virt.hardware [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1380.460061] env[62508]: DEBUG nova.virt.hardware [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1380.460247] env[62508]: DEBUG nova.virt.hardware [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1380.460388] env[62508]: DEBUG nova.virt.hardware [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1380.460538] env[62508]: DEBUG nova.virt.hardware [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1380.460740] env[62508]: DEBUG nova.virt.hardware [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1380.460926] env[62508]: DEBUG nova.virt.hardware [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1380.461122] env[62508]: DEBUG nova.virt.hardware [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1380.461289] env[62508]: DEBUG nova.virt.hardware [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1380.461461] env[62508]: DEBUG nova.virt.hardware [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1380.462328] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13310c92-92c9-4392-88e8-a4e08b93d665 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.471071] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1cdce96-b3f3-4073-9d1a-11e545ea2a0d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.508582] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.107s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1380.510897] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.826s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.512572] env[62508]: INFO nova.compute.claims [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1380.560154] env[62508]: INFO nova.scheduler.client.report [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Deleted allocations for instance 575ea3dc-850d-4078-8678-41b3c40a4c27 [ 1380.667846] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525e9d55-68e6-ee05-01ba-f6cb5d45246a, 'name': SearchDatastore_Task, 'duration_secs': 0.008817} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.668937] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1380.669247] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 2f7b7109-0ced-4ea4-8dde-608655f2b3ab/2f7b7109-0ced-4ea4-8dde-608655f2b3ab.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1380.669532] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ec91556-3bae-4eb0-9fa5-597645e78aed {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.677256] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Waiting for the task: (returnval){ [ 1380.677256] env[62508]: value = "task-1775521" [ 1380.677256] env[62508]: _type = "Task" [ 1380.677256] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.687549] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': task-1775521, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.713498] env[62508]: DEBUG nova.network.neutron [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Updating instance_info_cache with network_info: [{"id": "bcf067d3-85ff-44e6-bb5d-153c8b425360", "address": "fa:16:3e:b9:ec:80", "network": {"id": "60ac9aa7-8f09-4ba8-b8bd-545df0c62d98", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-289553303-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d58abb4cdcb74100b7c81076c7642b6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcf067d3-85", "ovs_interfaceid": "bcf067d3-85ff-44e6-bb5d-153c8b425360", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1380.744380] env[62508]: DEBUG oslo_concurrency.lockutils [req-9beef64b-5f8c-410e-b3e0-298178a259c1 req-a347ef9e-590f-4652-9d45-b8eb6f552beb service nova] Releasing lock "refresh_cache-e652e59f-9432-41cf-b4a5-0f5cf649b24e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1380.744950] env[62508]: DEBUG oslo_concurrency.lockutils [req-1b1af74e-a99e-49cf-af16-ec3f275146fa req-d324c994-8ee1-41e3-96ef-086eec647379 service nova] Releasing lock "refresh_cache-ee99ff4d-9996-4cfa-b038-7b19aef27438" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1380.745214] env[62508]: DEBUG nova.compute.manager [req-1b1af74e-a99e-49cf-af16-ec3f275146fa req-d324c994-8ee1-41e3-96ef-086eec647379 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Received event network-vif-plugged-93c4291d-0197-4fb0-9a10-3b95a4a17e60 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1380.745481] env[62508]: DEBUG oslo_concurrency.lockutils [req-1b1af74e-a99e-49cf-af16-ec3f275146fa req-d324c994-8ee1-41e3-96ef-086eec647379 service nova] Acquiring lock "aae3b4a3-c954-4f73-bd12-9b19a675179c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.745633] env[62508]: DEBUG oslo_concurrency.lockutils [req-1b1af74e-a99e-49cf-af16-ec3f275146fa req-d324c994-8ee1-41e3-96ef-086eec647379 service nova] Lock "aae3b4a3-c954-4f73-bd12-9b19a675179c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.745798] env[62508]: DEBUG oslo_concurrency.lockutils 
[req-1b1af74e-a99e-49cf-af16-ec3f275146fa req-d324c994-8ee1-41e3-96ef-086eec647379 service nova] Lock "aae3b4a3-c954-4f73-bd12-9b19a675179c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1380.746780] env[62508]: DEBUG nova.compute.manager [req-1b1af74e-a99e-49cf-af16-ec3f275146fa req-d324c994-8ee1-41e3-96ef-086eec647379 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] No waiting events found dispatching network-vif-plugged-93c4291d-0197-4fb0-9a10-3b95a4a17e60 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1380.746780] env[62508]: WARNING nova.compute.manager [req-1b1af74e-a99e-49cf-af16-ec3f275146fa req-d324c994-8ee1-41e3-96ef-086eec647379 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Received unexpected event network-vif-plugged-93c4291d-0197-4fb0-9a10-3b95a4a17e60 for instance with vm_state building and task_state spawning. [ 1380.810185] env[62508]: DEBUG nova.compute.manager [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1380.842425] env[62508]: INFO nova.compute.manager [-] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Took 1.49 seconds to deallocate network for instance. [ 1381.073515] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c0f7c724-3a16-4c1a-9fc7-0ac05612e7ce tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "575ea3dc-850d-4078-8678-41b3c40a4c27" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.288s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.189753] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': task-1775521, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.223143] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Releasing lock "refresh_cache-a226327d-11df-45e0-bef8-2337a0317c9e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1381.346989] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.348931] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.551288] env[62508]: DEBUG nova.network.neutron [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Successfully updated port: fbbd6f2c-037f-4a48-ad02-f3292bd93802 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1381.570033] env[62508]: DEBUG nova.compute.manager [req-d12cb9f6-b394-4261-a20f-5e7741bb65bd req-d157d648-11c4-4250-acce-59e997b8df14 service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Received event network-changed-987ac3c1-9f91-4672-9ca9-339fd8ad1dfd {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1381.570356] env[62508]: DEBUG nova.compute.manager [req-d12cb9f6-b394-4261-a20f-5e7741bb65bd req-d157d648-11c4-4250-acce-59e997b8df14 service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Refreshing instance network info cache due to event network-changed-987ac3c1-9f91-4672-9ca9-339fd8ad1dfd. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1381.570712] env[62508]: DEBUG oslo_concurrency.lockutils [req-d12cb9f6-b394-4261-a20f-5e7741bb65bd req-d157d648-11c4-4250-acce-59e997b8df14 service nova] Acquiring lock "refresh_cache-ee99ff4d-9996-4cfa-b038-7b19aef27438" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.570994] env[62508]: DEBUG oslo_concurrency.lockutils [req-d12cb9f6-b394-4261-a20f-5e7741bb65bd req-d157d648-11c4-4250-acce-59e997b8df14 service nova] Acquired lock "refresh_cache-ee99ff4d-9996-4cfa-b038-7b19aef27438" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.571774] env[62508]: DEBUG nova.network.neutron [req-d12cb9f6-b394-4261-a20f-5e7741bb65bd req-d157d648-11c4-4250-acce-59e997b8df14 service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Refreshing network info cache for port 987ac3c1-9f91-4672-9ca9-339fd8ad1dfd {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1381.622049] env[62508]: DEBUG nova.network.neutron [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Successfully updated port: 98a8b500-40eb-420e-8812-e8780d0c7c17 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1381.687838] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': task-1775521, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539592} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.690399] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 2f7b7109-0ced-4ea4-8dde-608655f2b3ab/2f7b7109-0ced-4ea4-8dde-608655f2b3ab.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1381.690611] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1381.691496] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d86e78a1-23dc-4577-a5c2-298191f7383a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.697529] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Waiting for the task: (returnval){ [ 1381.697529] env[62508]: value = "task-1775522" [ 1381.697529] env[62508]: _type = "Task" [ 1381.697529] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.705633] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': task-1775522, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.739617] env[62508]: DEBUG nova.compute.manager [req-15afb4ad-bd88-466c-b790-27daae2a1caf req-10b10f0d-c9ed-4d10-999c-8def65e2057b service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Received event network-changed-93c4291d-0197-4fb0-9a10-3b95a4a17e60 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1381.739819] env[62508]: DEBUG nova.compute.manager [req-15afb4ad-bd88-466c-b790-27daae2a1caf req-10b10f0d-c9ed-4d10-999c-8def65e2057b service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Refreshing instance network info cache due to event network-changed-93c4291d-0197-4fb0-9a10-3b95a4a17e60. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1381.740053] env[62508]: DEBUG oslo_concurrency.lockutils [req-15afb4ad-bd88-466c-b790-27daae2a1caf req-10b10f0d-c9ed-4d10-999c-8def65e2057b service nova] Acquiring lock "refresh_cache-aae3b4a3-c954-4f73-bd12-9b19a675179c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.740261] env[62508]: DEBUG oslo_concurrency.lockutils [req-15afb4ad-bd88-466c-b790-27daae2a1caf req-10b10f0d-c9ed-4d10-999c-8def65e2057b service nova] Acquired lock "refresh_cache-aae3b4a3-c954-4f73-bd12-9b19a675179c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.740395] env[62508]: DEBUG nova.network.neutron [req-15afb4ad-bd88-466c-b790-27daae2a1caf req-10b10f0d-c9ed-4d10-999c-8def65e2057b service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Refreshing network info cache for port 93c4291d-0197-4fb0-9a10-3b95a4a17e60 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1381.761738] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1381.762086] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-97bf087d-9093-4f60-b41c-4b6d51d64b16 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.768662] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1381.768662] env[62508]: value = "task-1775523" [ 1381.768662] env[62508]: _type = "Task" [ 1381.768662] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.778245] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775523, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.998415] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988025a2-6c68-4279-a278-61a2723ab60b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.006522] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cfa3cad-593b-4a96-b6c4-bced564cccb6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.042894] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962662f0-eaf9-4f0d-87c7-511adf528bba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.054046] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b89d71-cbe0-430b-97cb-434185f56b14 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.066227] env[62508]: DEBUG nova.compute.provider_tree [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1382.129822] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquiring lock "refresh_cache-ce74cbd8-b709-418b-a206-f51975fd0af1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.129984] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquired lock "refresh_cache-ce74cbd8-b709-418b-a206-f51975fd0af1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.130178] env[62508]: DEBUG nova.network.neutron [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1382.159653] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquiring lock "e652e59f-9432-41cf-b4a5-0f5cf649b24e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.208879] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': task-1775522, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068525} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.211866] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1382.212738] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f78e50-54f8-4515-b186-2fa70ea4453a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.236779] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 2f7b7109-0ced-4ea4-8dde-608655f2b3ab/2f7b7109-0ced-4ea4-8dde-608655f2b3ab.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1382.240238] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8187742e-62c3-4763-b3ad-793bcdb29827 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.262052] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Waiting for the task: (returnval){ [ 1382.262052] env[62508]: value = "task-1775524" [ 1382.262052] env[62508]: _type = "Task" [ 1382.262052] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.270658] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': task-1775524, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.280818] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775523, 'name': PowerOffVM_Task, 'duration_secs': 0.29054} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.280818] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1382.280818] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac329ffe-fe6e-4cc6-9250-74ad5ecc02f7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.305265] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b9ff8a-07d3-409e-8143-6dbc726d6779 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.316186] env[62508]: DEBUG nova.network.neutron [req-15afb4ad-bd88-466c-b790-27daae2a1caf req-10b10f0d-c9ed-4d10-999c-8def65e2057b service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1382.331330] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1382.331330] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2365968c-59f4-47b3-a7a5-635b065f9742 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.339525] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1382.339525] env[62508]: value = "task-1775525" [ 1382.339525] env[62508]: _type = "Task" [ 1382.339525] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.350200] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] VM already powered off {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1382.350409] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1382.350645] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.350998] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.351073] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1382.351279] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-abbb90b3-82f8-4ba7-b03a-63712f5fbb52 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.358677] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1382.358891] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1382.359651] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7664b9d-b78b-4211-9c1d-cd24ec818d98 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.364766] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1382.364766] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f1a896-8141-cd2f-fd86-a4f217dd2c7b" [ 1382.364766] env[62508]: _type = "Task" [ 1382.364766] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.372164] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f1a896-8141-cd2f-fd86-a4f217dd2c7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.387132] env[62508]: DEBUG nova.network.neutron [req-d12cb9f6-b394-4261-a20f-5e7741bb65bd req-d157d648-11c4-4250-acce-59e997b8df14 service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Updated VIF entry in instance network info cache for port 987ac3c1-9f91-4672-9ca9-339fd8ad1dfd. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1382.387477] env[62508]: DEBUG nova.network.neutron [req-d12cb9f6-b394-4261-a20f-5e7741bb65bd req-d157d648-11c4-4250-acce-59e997b8df14 service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Updating instance_info_cache with network_info: [{"id": "987ac3c1-9f91-4672-9ca9-339fd8ad1dfd", "address": "fa:16:3e:0d:65:46", "network": {"id": "93471a4c-8043-41bc-9a6c-49116ad50d1a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2104534686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d188021b90b4a7cb04521e090d0c1c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap987ac3c1-9f", "ovs_interfaceid": "987ac3c1-9f91-4672-9ca9-339fd8ad1dfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.497783] env[62508]: DEBUG nova.network.neutron [req-15afb4ad-bd88-466c-b790-27daae2a1caf req-10b10f0d-c9ed-4d10-999c-8def65e2057b service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1382.569847] env[62508]: DEBUG nova.scheduler.client.report [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1382.663429] env[62508]: DEBUG nova.network.neutron [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1382.771761] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': task-1775524, 'name': ReconfigVM_Task, 'duration_secs': 0.313556} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.772171] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 2f7b7109-0ced-4ea4-8dde-608655f2b3ab/2f7b7109-0ced-4ea4-8dde-608655f2b3ab.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1382.773097] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e89ffe4f-5a04-4f19-b60c-5afdb833ec20 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.779527] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Waiting for the task: (returnval){ [ 1382.779527] env[62508]: value = "task-1775526" [ 1382.779527] env[62508]: _type = "Task" [ 1382.779527] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.787266] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': task-1775526, 'name': Rename_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.863380] env[62508]: DEBUG nova.network.neutron [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Updating instance_info_cache with network_info: [{"id": "98a8b500-40eb-420e-8812-e8780d0c7c17", "address": "fa:16:3e:d6:ae:4f", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.91", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98a8b500-40", "ovs_interfaceid": "98a8b500-40eb-420e-8812-e8780d0c7c17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.876033] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f1a896-8141-cd2f-fd86-a4f217dd2c7b, 'name': SearchDatastore_Task, 'duration_secs': 0.008039} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.876836] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0e7fc13-2a3c-47b4-aae4-2c12124ca1c9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.883119] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1382.883119] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52223795-b334-ed3c-adbc-2223e7afdf16" [ 1382.883119] env[62508]: _type = "Task" [ 1382.883119] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.894120] env[62508]: DEBUG oslo_concurrency.lockutils [req-d12cb9f6-b394-4261-a20f-5e7741bb65bd req-d157d648-11c4-4250-acce-59e997b8df14 service nova] Releasing lock "refresh_cache-ee99ff4d-9996-4cfa-b038-7b19aef27438" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1382.894892] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52223795-b334-ed3c-adbc-2223e7afdf16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.001371] env[62508]: DEBUG oslo_concurrency.lockutils [req-15afb4ad-bd88-466c-b790-27daae2a1caf req-10b10f0d-c9ed-4d10-999c-8def65e2057b service nova] Releasing lock "refresh_cache-aae3b4a3-c954-4f73-bd12-9b19a675179c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.001646] env[62508]: DEBUG nova.compute.manager [req-15afb4ad-bd88-466c-b790-27daae2a1caf req-10b10f0d-c9ed-4d10-999c-8def65e2057b service nova] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Received event network-vif-deleted-c1117e3d-5c3e-4513-832b-12ce45699cab {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1383.075378] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.564s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.075945] env[62508]: DEBUG nova.compute.manager [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1383.084149] env[62508]: DEBUG oslo_concurrency.lockutils [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.420s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.087601] env[62508]: INFO nova.compute.claims [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1383.292441] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': task-1775526, 'name': Rename_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.368019] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Releasing lock "refresh_cache-ce74cbd8-b709-418b-a206-f51975fd0af1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.368019] env[62508]: DEBUG nova.compute.manager [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Instance network_info: |[{"id": "98a8b500-40eb-420e-8812-e8780d0c7c17", "address": "fa:16:3e:d6:ae:4f", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.91", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98a8b500-40", "ovs_interfaceid": "98a8b500-40eb-420e-8812-e8780d0c7c17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1383.368254] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:ae:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98a8b500-40eb-420e-8812-e8780d0c7c17', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1383.375111] env[62508]: DEBUG oslo.service.loopingcall [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1383.377392] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1383.377835] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dc943e86-b7d4-4451-9302-abfde6fbe9fd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.402855] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52223795-b334-ed3c-adbc-2223e7afdf16, 'name': SearchDatastore_Task, 'duration_secs': 0.012581} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.404276] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.404671] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a226327d-11df-45e0-bef8-2337a0317c9e/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk. {{(pid=62508) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1383.405046] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1383.405046] env[62508]: value = "task-1775527" [ 1383.405046] env[62508]: _type = "Task" [ 1383.405046] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.405433] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff7c6546-d1f0-4a83-84d3-c2accde3adc6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.416640] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775527, 'name': CreateVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.418423] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1383.418423] env[62508]: value = "task-1775528" [ 1383.418423] env[62508]: _type = "Task" [ 1383.418423] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.428579] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775528, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.590569] env[62508]: DEBUG nova.compute.utils [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1383.592589] env[62508]: DEBUG nova.compute.manager [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1383.592589] env[62508]: DEBUG nova.network.neutron [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1383.679642] env[62508]: DEBUG nova.policy [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b800673f96f044ed88b315cff40b1ef5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '96ce65b1fa5f4a2aad45957e276145ad', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1383.736586] env[62508]: DEBUG nova.compute.manager [req-f36b18df-a173-4a72-88d8-fdc9e0348c38 req-ae8e2353-013e-4251-ae32-595dbb05898c service nova] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Received event network-vif-plugged-98a8b500-40eb-420e-8812-e8780d0c7c17 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1383.736840] env[62508]: DEBUG oslo_concurrency.lockutils [req-f36b18df-a173-4a72-88d8-fdc9e0348c38 req-ae8e2353-013e-4251-ae32-595dbb05898c service nova] Acquiring lock "ce74cbd8-b709-418b-a206-f51975fd0af1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.737040] env[62508]: DEBUG oslo_concurrency.lockutils [req-f36b18df-a173-4a72-88d8-fdc9e0348c38 req-ae8e2353-013e-4251-ae32-595dbb05898c service nova] Lock "ce74cbd8-b709-418b-a206-f51975fd0af1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.737205] env[62508]: DEBUG oslo_concurrency.lockutils [req-f36b18df-a173-4a72-88d8-fdc9e0348c38 
req-ae8e2353-013e-4251-ae32-595dbb05898c service nova] Lock "ce74cbd8-b709-418b-a206-f51975fd0af1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.737369] env[62508]: DEBUG nova.compute.manager [req-f36b18df-a173-4a72-88d8-fdc9e0348c38 req-ae8e2353-013e-4251-ae32-595dbb05898c service nova] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] No waiting events found dispatching network-vif-plugged-98a8b500-40eb-420e-8812-e8780d0c7c17 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1383.737525] env[62508]: WARNING nova.compute.manager [req-f36b18df-a173-4a72-88d8-fdc9e0348c38 req-ae8e2353-013e-4251-ae32-595dbb05898c service nova] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Received unexpected event network-vif-plugged-98a8b500-40eb-420e-8812-e8780d0c7c17 for instance with vm_state building and task_state spawning. [ 1383.737679] env[62508]: DEBUG nova.compute.manager [req-f36b18df-a173-4a72-88d8-fdc9e0348c38 req-ae8e2353-013e-4251-ae32-595dbb05898c service nova] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Received event network-changed-98a8b500-40eb-420e-8812-e8780d0c7c17 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1383.737823] env[62508]: DEBUG nova.compute.manager [req-f36b18df-a173-4a72-88d8-fdc9e0348c38 req-ae8e2353-013e-4251-ae32-595dbb05898c service nova] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Refreshing instance network info cache due to event network-changed-98a8b500-40eb-420e-8812-e8780d0c7c17. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1383.740057] env[62508]: DEBUG oslo_concurrency.lockutils [req-f36b18df-a173-4a72-88d8-fdc9e0348c38 req-ae8e2353-013e-4251-ae32-595dbb05898c service nova] Acquiring lock "refresh_cache-ce74cbd8-b709-418b-a206-f51975fd0af1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.740057] env[62508]: DEBUG oslo_concurrency.lockutils [req-f36b18df-a173-4a72-88d8-fdc9e0348c38 req-ae8e2353-013e-4251-ae32-595dbb05898c service nova] Acquired lock "refresh_cache-ce74cbd8-b709-418b-a206-f51975fd0af1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.740057] env[62508]: DEBUG nova.network.neutron [req-f36b18df-a173-4a72-88d8-fdc9e0348c38 req-ae8e2353-013e-4251-ae32-595dbb05898c service nova] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Refreshing network info cache for port 98a8b500-40eb-420e-8812-e8780d0c7c17 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1383.790754] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': task-1775526, 'name': Rename_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.810274] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Acquiring lock "b297d642-88a7-4acc-a94d-e1cb7df81982" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.810487] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Lock "b297d642-88a7-4acc-a94d-e1cb7df81982" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.811087] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Acquiring lock "b297d642-88a7-4acc-a94d-e1cb7df81982-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.811345] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Lock "b297d642-88a7-4acc-a94d-e1cb7df81982-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.811524] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Lock "b297d642-88a7-4acc-a94d-e1cb7df81982-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.815118] env[62508]: INFO nova.compute.manager [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Terminating instance [ 1383.817586] env[62508]: DEBUG nova.compute.manager [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1383.817786] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1383.818633] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e44f3535-2a66-4737-9128-3035b8c7458a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.826496] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1383.826781] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f9e120a-8398-4f7f-9a08-88845f51e6d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.832979] env[62508]: DEBUG oslo_vmware.api [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Waiting for the task: (returnval){ [ 1383.832979] env[62508]: value = "task-1775529" [ 1383.832979] env[62508]: _type = "Task" [ 1383.832979] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.843316] env[62508]: DEBUG oslo_vmware.api [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': task-1775529, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.867010] env[62508]: DEBUG nova.compute.manager [req-d319dc55-c4e8-460e-a18a-46acbaf9d142 req-fbb0b410-f474-4407-a131-f7e7f1f78a62 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Received event network-vif-plugged-fbbd6f2c-037f-4a48-ad02-f3292bd93802 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1383.867282] env[62508]: DEBUG oslo_concurrency.lockutils [req-d319dc55-c4e8-460e-a18a-46acbaf9d142 req-fbb0b410-f474-4407-a131-f7e7f1f78a62 service nova] Acquiring lock "aae3b4a3-c954-4f73-bd12-9b19a675179c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.867772] env[62508]: DEBUG oslo_concurrency.lockutils [req-d319dc55-c4e8-460e-a18a-46acbaf9d142 req-fbb0b410-f474-4407-a131-f7e7f1f78a62 service nova] Lock "aae3b4a3-c954-4f73-bd12-9b19a675179c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.868007] env[62508]: DEBUG oslo_concurrency.lockutils [req-d319dc55-c4e8-460e-a18a-46acbaf9d142 req-fbb0b410-f474-4407-a131-f7e7f1f78a62 service nova] Lock "aae3b4a3-c954-4f73-bd12-9b19a675179c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.868448] env[62508]: DEBUG nova.compute.manager [req-d319dc55-c4e8-460e-a18a-46acbaf9d142 req-fbb0b410-f474-4407-a131-f7e7f1f78a62 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] No waiting events found dispatching network-vif-plugged-fbbd6f2c-037f-4a48-ad02-f3292bd93802 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1383.868984] env[62508]: WARNING nova.compute.manager [req-d319dc55-c4e8-460e-a18a-46acbaf9d142 req-fbb0b410-f474-4407-a131-f7e7f1f78a62 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Received unexpected event network-vif-plugged-fbbd6f2c-037f-4a48-ad02-f3292bd93802 for instance with vm_state building and task_state spawning. [ 1383.869263] env[62508]: DEBUG nova.compute.manager [req-d319dc55-c4e8-460e-a18a-46acbaf9d142 req-fbb0b410-f474-4407-a131-f7e7f1f78a62 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Received event network-changed-fbbd6f2c-037f-4a48-ad02-f3292bd93802 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1383.869455] env[62508]: DEBUG nova.compute.manager [req-d319dc55-c4e8-460e-a18a-46acbaf9d142 req-fbb0b410-f474-4407-a131-f7e7f1f78a62 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Refreshing instance network info cache due to event network-changed-fbbd6f2c-037f-4a48-ad02-f3292bd93802. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1383.869732] env[62508]: DEBUG oslo_concurrency.lockutils [req-d319dc55-c4e8-460e-a18a-46acbaf9d142 req-fbb0b410-f474-4407-a131-f7e7f1f78a62 service nova] Acquiring lock "refresh_cache-aae3b4a3-c954-4f73-bd12-9b19a675179c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.870143] env[62508]: DEBUG oslo_concurrency.lockutils [req-d319dc55-c4e8-460e-a18a-46acbaf9d142 req-fbb0b410-f474-4407-a131-f7e7f1f78a62 service nova] Acquired lock "refresh_cache-aae3b4a3-c954-4f73-bd12-9b19a675179c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.870360] env[62508]: DEBUG nova.network.neutron [req-d319dc55-c4e8-460e-a18a-46acbaf9d142 req-fbb0b410-f474-4407-a131-f7e7f1f78a62 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Refreshing network info cache for port fbbd6f2c-037f-4a48-ad02-f3292bd93802 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1383.917735] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775527, 'name': CreateVM_Task, 'duration_secs': 0.368826} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.917927] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1383.918808] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.919401] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.919401] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1383.919700] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38bea192-6503-4d58-9ee6-8c4d4ac9f922 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.928592] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for the task: (returnval){ [ 1383.928592] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520032b1-3863-0a10-9753-3c9cc781d084" [ 1383.928592] env[62508]: _type = "Task" [ 1383.928592] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.933386] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775528, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.943091] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520032b1-3863-0a10-9753-3c9cc781d084, 'name': SearchDatastore_Task, 'duration_secs': 0.009711} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.943414] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.943664] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1383.943910] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.944083] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.947437] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1383.947781] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a8071a35-8224-4f75-bba5-84a704e6b6d3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.956286] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1383.956479] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1383.957263] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf386701-f4ce-4ce7-8239-600b7787a166 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.962377] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for the task: (returnval){ [ 1383.962377] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529e0c5b-d73b-94ea-a1de-155ce33ab1fb" [ 1383.962377] env[62508]: _type = "Task" [ 1383.962377] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.971680] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529e0c5b-d73b-94ea-a1de-155ce33ab1fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.101328] env[62508]: DEBUG nova.compute.manager [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1384.115521] env[62508]: DEBUG nova.network.neutron [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Successfully updated port: 39708f16-52cb-48ca-b3e5-da465cea2af7 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1384.292032] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': task-1775526, 'name': Rename_Task, 'duration_secs': 1.161726} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.292297] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1384.295345] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2ef44fae-161e-4e97-ad95-de0a970b03a3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.302667] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Waiting for the task: (returnval){ [ 1384.302667] env[62508]: value = "task-1775530" [ 1384.302667] env[62508]: _type = "Task" [ 1384.302667] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.309638] env[62508]: DEBUG nova.network.neutron [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Successfully created port: f6601a11-9230-42a6-969e-6d69816e0f4b {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1384.315974] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': task-1775530, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.342599] env[62508]: DEBUG oslo_vmware.api [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': task-1775529, 'name': PowerOffVM_Task, 'duration_secs': 0.201345} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.342888] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1384.343136] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1384.343438] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b51ad1e-cb5a-4a17-98fa-a4d9aec12c54 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.432179] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1384.432179] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1384.432179] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Deleting the datastore file [datastore1] b297d642-88a7-4acc-a94d-e1cb7df81982 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1384.436129] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd3dda98-4081-4091-940d-e81f875b6bef {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.438440] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775528, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.940849} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.438848] env[62508]: INFO nova.virt.vmwareapi.ds_util [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a226327d-11df-45e0-bef8-2337a0317c9e/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk. 
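The "Waiting for the task: (returnval){ value = "task-..." } to complete" / "progress is N%" / "completed successfully" triples that recur throughout this trace are oslo.vmware's task-polling pattern: each vCenter operation (CopyVirtualDisk_Task, ReconfigVM_Task, CreateVM_Task, and so on) returns a task reference that wait_for_task() polls until it reaches a terminal state. A minimal sketch of that loop, with poll_task_state() as a hypothetical stand-in for the PropertyCollector read that oslo.vmware performs internally:

    import time

    class TaskFailed(Exception):
        """A vCenter task ended in the 'error' state."""

    def wait_for_task(poll_task_state, task_id, poll_interval=0.5):
        # poll_task_state(task_id) -> (state, progress, error) is an assumed
        # callable; it stands in for what oslo.vmware's _poll_task reads back
        # for entries such as task-1775528 (CopyVirtualDisk_Task) above.
        while True:
            state, progress, error = poll_task_state(task_id)
            if state == "success":      # logged as "completed successfully"
                return
            if state == "error":
                raise TaskFailed(error)
            # 'queued' / 'running': logged as "progress is 0%/5%/99%", retry.
            time.sleep(poll_interval)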
[ 1384.440384] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d92232-b4ef-4774-9bda-c9105ff1e7e6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.447315] env[62508]: DEBUG oslo_vmware.api [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Waiting for the task: (returnval){ [ 1384.447315] env[62508]: value = "task-1775532" [ 1384.447315] env[62508]: _type = "Task" [ 1384.447315] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.451988] env[62508]: DEBUG nova.network.neutron [req-d319dc55-c4e8-460e-a18a-46acbaf9d142 req-fbb0b410-f474-4407-a131-f7e7f1f78a62 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1384.480140] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] a226327d-11df-45e0-bef8-2337a0317c9e/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1384.491305] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bcd6456f-dd64-455c-8f1f-c62f0b2b6683 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.513562] env[62508]: DEBUG oslo_vmware.api [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': task-1775532, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.518768] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529e0c5b-d73b-94ea-a1de-155ce33ab1fb, 'name': SearchDatastore_Task, 'duration_secs': 0.008722} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.520838] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1384.520838] env[62508]: value = "task-1775533" [ 1384.520838] env[62508]: _type = "Task" [ 1384.520838] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.521122] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-839d7ff5-6270-4504-ab68-415cf5a712aa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.538390] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775533, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.542053] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for the task: (returnval){ [ 1384.542053] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52008883-c534-a88d-c1ee-1a0deaf3df73" [ 1384.542053] env[62508]: _type = "Task" [ 1384.542053] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.553078] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52008883-c534-a88d-c1ee-1a0deaf3df73, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.619720] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquiring lock "refresh_cache-aae3b4a3-c954-4f73-bd12-9b19a675179c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.632560] env[62508]: DEBUG nova.network.neutron [req-d319dc55-c4e8-460e-a18a-46acbaf9d142 req-fbb0b410-f474-4407-a131-f7e7f1f78a62 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1384.716021] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937873d0-d3f2-4d1b-a0b9-c450aba11f86 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.724177] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb5abe6-c70b-4828-b329-57b1a75fa31b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.765277] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc88404-3dc0-4573-87f4-0bab927a8153 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.774078] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282f95ca-7856-46b3-91d7-f1dff309d9ba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1384.791777] env[62508]: DEBUG nova.compute.provider_tree [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1384.816542] env[62508]: DEBUG oslo_vmware.api [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': task-1775530, 'name': PowerOnVM_Task, 'duration_secs': 0.504751} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.816767] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1384.816974] env[62508]: INFO nova.compute.manager [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Took 10.93 seconds to spawn the instance on the hypervisor. [ 1384.817172] env[62508]: DEBUG nova.compute.manager [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1384.818277] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e032b2c-914c-47e6-bd50-e0d271098cb2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.958405] env[62508]: DEBUG oslo_vmware.api [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Task: {'id': task-1775532, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.214955} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.958659] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1384.958883] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1384.959043] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1384.959233] env[62508]: INFO nova.compute.manager [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1384.959475] env[62508]: DEBUG oslo.service.loopingcall [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1384.959995] env[62508]: DEBUG nova.compute.manager [-] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1384.960121] env[62508]: DEBUG nova.network.neutron [-] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1385.034045] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775533, 'name': ReconfigVM_Task, 'duration_secs': 0.356952} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.034350] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Reconfigured VM instance instance-00000017 to attach disk [datastore1] a226327d-11df-45e0-bef8-2337a0317c9e/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1385.035247] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec6afa0-1ae0-4e4c-9b6d-d6fdab81afed {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.069309] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5ecc9cd-5009-4649-89c4-927b67ff1383 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.087543] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52008883-c534-a88d-c1ee-1a0deaf3df73, 'name': SearchDatastore_Task, 'duration_secs': 0.02174} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.088812] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.089186] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] ce74cbd8-b709-418b-a206-f51975fd0af1/ce74cbd8-b709-418b-a206-f51975fd0af1.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1385.089530] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1385.089530] env[62508]: value = "task-1775534" [ 1385.089530] env[62508]: _type = "Task" [ 1385.089530] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.089724] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-107200a6-38cc-4a35-9ab7-8ae9901b3c22 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.100822] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775534, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.102447] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for the task: (returnval){ [ 1385.102447] env[62508]: value = "task-1775535" [ 1385.102447] env[62508]: _type = "Task" [ 1385.102447] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.110937] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "192995e7-82f5-41be-990d-d91b93f981e1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.111238] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "192995e7-82f5-41be-990d-d91b93f981e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.111454] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "192995e7-82f5-41be-990d-d91b93f981e1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.111654] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "192995e7-82f5-41be-990d-d91b93f981e1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.111841] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "192995e7-82f5-41be-990d-d91b93f981e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.113608] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775535, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.114419] env[62508]: INFO nova.compute.manager [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Terminating instance [ 1385.116348] env[62508]: DEBUG nova.compute.manager [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1385.116552] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1385.117412] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a564504a-75fc-4edd-8d5b-af56962d9e17 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.121256] env[62508]: DEBUG nova.compute.manager [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1385.128100] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1385.128382] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3bfc59d1-67f9-480f-864e-b8cbebb7aef5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.132953] env[62508]: DEBUG nova.network.neutron [req-f36b18df-a173-4a72-88d8-fdc9e0348c38 req-ae8e2353-013e-4251-ae32-595dbb05898c service nova] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Updated VIF entry in instance network info cache for port 98a8b500-40eb-420e-8812-e8780d0c7c17. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1385.133211] env[62508]: DEBUG nova.network.neutron [req-f36b18df-a173-4a72-88d8-fdc9e0348c38 req-ae8e2353-013e-4251-ae32-595dbb05898c service nova] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Updating instance_info_cache with network_info: [{"id": "98a8b500-40eb-420e-8812-e8780d0c7c17", "address": "fa:16:3e:d6:ae:4f", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.91", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98a8b500-40", "ovs_interfaceid": "98a8b500-40eb-420e-8812-e8780d0c7c17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.134497] env[62508]: DEBUG oslo_concurrency.lockutils [req-d319dc55-c4e8-460e-a18a-46acbaf9d142 req-fbb0b410-f474-4407-a131-f7e7f1f78a62 service nova] Releasing lock "refresh_cache-aae3b4a3-c954-4f73-bd12-9b19a675179c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.136609] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquired lock "refresh_cache-aae3b4a3-c954-4f73-bd12-9b19a675179c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.136785] env[62508]: DEBUG nova.network.neutron [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1385.141017] env[62508]: DEBUG oslo_vmware.api [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1385.141017] env[62508]: value = "task-1775536" [ 1385.141017] env[62508]: _type = "Task" [ 1385.141017] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.148237] env[62508]: DEBUG oslo_vmware.api [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775536, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.170700] env[62508]: DEBUG nova.virt.hardware [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1385.170983] env[62508]: DEBUG nova.virt.hardware [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1385.171606] env[62508]: DEBUG nova.virt.hardware [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1385.171606] env[62508]: DEBUG nova.virt.hardware [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1385.171606] env[62508]: DEBUG nova.virt.hardware [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1385.171818] env[62508]: DEBUG nova.virt.hardware [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1385.171911] env[62508]: DEBUG nova.virt.hardware [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1385.172093] env[62508]: DEBUG nova.virt.hardware [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1385.172293] env[62508]: DEBUG 
nova.virt.hardware [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1385.172467] env[62508]: DEBUG nova.virt.hardware [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1385.172660] env[62508]: DEBUG nova.virt.hardware [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1385.173909] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a5612d5-2c1b-4828-a6ab-6009dcabd319 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.181946] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2501e6b7-6a3c-4d0f-8936-c9eea5673044 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.298464] env[62508]: DEBUG nova.scheduler.client.report [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1385.340389] env[62508]: INFO nova.compute.manager [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Took 49.99 seconds to build instance. 
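The nova.virt.hardware entries above trace how a CPU topology is derived for the 1-vCPU m1.nano flavor: flavor and image limits/preferences are all 0:0:0, so every sockets/cores/threads combination whose product equals the vCPU count (within the 65536 limits printed in the log) is a candidate, and for 1 vCPU the only one is 1:1:1. The sketch below is an illustrative, simplified re-implementation of that enumeration, not Nova's actual `_get_possible_cpu_topologies`; the class and parameter names are assumptions modelled on the log output.

```python
# Illustrative sketch of the topology enumeration described in the log above.
# Not Nova's real implementation; names and the 65536 defaults are assumptions
# taken only from the "limits were sockets=65536, cores=65536, threads=65536" entry.
from typing import List, NamedTuple


class VirtCPUTopology(NamedTuple):
    sockets: int
    cores: int
    threads: int


def possible_cpu_topologies(vcpus: int,
                            max_sockets: int = 65536,
                            max_cores: int = 65536,
                            max_threads: int = 65536) -> List[VirtCPUTopology]:
    """Return every sockets*cores*threads combination that yields `vcpus`."""
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies


if __name__ == "__main__":
    # For the 1-vCPU m1.nano flavor the only candidate is 1:1:1,
    # matching "Got 1 possible topologies" in the log.
    print(possible_cpu_topologies(1))
```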
[ 1385.428392] env[62508]: DEBUG oslo_concurrency.lockutils [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "03552483-a365-4d25-94bc-ea9b38ee6cd6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.428690] env[62508]: DEBUG oslo_concurrency.lockutils [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "03552483-a365-4d25-94bc-ea9b38ee6cd6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.429092] env[62508]: DEBUG oslo_concurrency.lockutils [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "03552483-a365-4d25-94bc-ea9b38ee6cd6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.429444] env[62508]: DEBUG oslo_concurrency.lockutils [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "03552483-a365-4d25-94bc-ea9b38ee6cd6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.429444] env[62508]: DEBUG oslo_concurrency.lockutils [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "03552483-a365-4d25-94bc-ea9b38ee6cd6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.432256] env[62508]: INFO nova.compute.manager [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Terminating instance [ 1385.434435] env[62508]: DEBUG nova.compute.manager [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1385.434696] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1385.435914] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-854271e1-c30d-4ea6-a59e-de2f46523713 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.444240] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1385.444893] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7953521e-3937-4627-b927-a5477bd7bb71 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.451430] env[62508]: DEBUG oslo_vmware.api [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1385.451430] env[62508]: value = "task-1775537" [ 1385.451430] env[62508]: _type = "Task" [ 1385.451430] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.464601] env[62508]: DEBUG oslo_vmware.api [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775537, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.602710] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775534, 'name': ReconfigVM_Task, 'duration_secs': 0.187894} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.603132] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1385.607054] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bfc899e2-f270-4636-97db-3f6ae4c556a2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.619127] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1385.619127] env[62508]: value = "task-1775538" [ 1385.619127] env[62508]: _type = "Task" [ 1385.619127] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.619539] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775535, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.631051] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775538, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.638389] env[62508]: DEBUG oslo_concurrency.lockutils [req-f36b18df-a173-4a72-88d8-fdc9e0348c38 req-ae8e2353-013e-4251-ae32-595dbb05898c service nova] Releasing lock "refresh_cache-ce74cbd8-b709-418b-a206-f51975fd0af1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.652197] env[62508]: DEBUG oslo_vmware.api [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775536, 'name': PowerOffVM_Task, 'duration_secs': 0.269663} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.655198] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1385.655198] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1385.655198] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f3e030a0-04fe-4760-9e10-54d3ca3e5296 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.691174] env[62508]: DEBUG nova.network.neutron [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1385.727757] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1385.727969] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1385.728227] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Deleting the datastore file [datastore1] 192995e7-82f5-41be-990d-d91b93f981e1 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1385.728512] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94b0555e-b0f4-450b-9fb4-0286df006891 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.735328] env[62508]: DEBUG oslo_vmware.api [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1385.735328] env[62508]: value = "task-1775540" [ 1385.735328] env[62508]: _type = "Task" [ 1385.735328] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.743401] env[62508]: DEBUG oslo_vmware.api [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775540, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.803631] env[62508]: DEBUG oslo_concurrency.lockutils [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.720s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.804210] env[62508]: DEBUG nova.compute.manager [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1385.807756] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.420s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.808308] env[62508]: DEBUG nova.objects.instance [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Lazy-loading 'resources' on Instance uuid ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1385.842050] env[62508]: DEBUG nova.network.neutron [-] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.842901] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24bd263f-21bb-4aaf-adcd-179c9b8635be tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Lock "2f7b7109-0ced-4ea4-8dde-608655f2b3ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.464s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.963021] env[62508]: DEBUG oslo_vmware.api [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775537, 'name': PowerOffVM_Task, 'duration_secs': 0.39126} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.963330] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1385.963525] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1385.963859] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b5d3f01-1dc4-4c01-933f-004e50be6a3c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.991377] env[62508]: DEBUG nova.compute.manager [req-0d5c2b2e-b004-407f-b3af-ccab15ae48bd req-17b09425-9e27-4447-b763-a6e6b07d2599 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Received event network-vif-plugged-39708f16-52cb-48ca-b3e5-da465cea2af7 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1385.991377] env[62508]: DEBUG oslo_concurrency.lockutils [req-0d5c2b2e-b004-407f-b3af-ccab15ae48bd req-17b09425-9e27-4447-b763-a6e6b07d2599 service nova] Acquiring lock "aae3b4a3-c954-4f73-bd12-9b19a675179c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.991377] env[62508]: DEBUG oslo_concurrency.lockutils [req-0d5c2b2e-b004-407f-b3af-ccab15ae48bd req-17b09425-9e27-4447-b763-a6e6b07d2599 service nova] Lock "aae3b4a3-c954-4f73-bd12-9b19a675179c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.991377] env[62508]: DEBUG oslo_concurrency.lockutils [req-0d5c2b2e-b004-407f-b3af-ccab15ae48bd req-17b09425-9e27-4447-b763-a6e6b07d2599 service nova] Lock "aae3b4a3-c954-4f73-bd12-9b19a675179c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.991377] env[62508]: DEBUG nova.compute.manager [req-0d5c2b2e-b004-407f-b3af-ccab15ae48bd req-17b09425-9e27-4447-b763-a6e6b07d2599 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] No waiting events found dispatching network-vif-plugged-39708f16-52cb-48ca-b3e5-da465cea2af7 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1385.991599] env[62508]: WARNING nova.compute.manager [req-0d5c2b2e-b004-407f-b3af-ccab15ae48bd req-17b09425-9e27-4447-b763-a6e6b07d2599 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Received unexpected event network-vif-plugged-39708f16-52cb-48ca-b3e5-da465cea2af7 for instance with vm_state building and task_state spawning. 
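The acquire/release pairs around these entries ("compute_resources", "refresh_cache-&lt;uuid&gt;", "&lt;uuid&gt;-events") come from oslo.concurrency locks that serialize per-instance work in the compute manager. Below is a minimal sketch of that pattern, assuming only the public `lockutils.lock` context manager and `lockutils.synchronized` decorator; the function names and the in-memory cache are hypothetical placeholders, not Nova code.

```python
# Minimal sketch of the per-instance locking pattern visible in the trace.
# Only lockutils.lock / lockutils.synchronized are real oslo.concurrency APIs;
# everything else (function names, the fake cache) is a placeholder.
from oslo_concurrency import lockutils

_network_cache = {}


def refresh_instance_cache(instance_uuid: str, network_info: list) -> None:
    # Mirrors the "Acquiring lock refresh_cache-<uuid>" / "Releasing lock"
    # pairs in the log: the lock scopes one cache update per instance.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        _network_cache[instance_uuid] = network_info


@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid: str) -> None:
    # The "compute_resources" lock above is held while the resource tracker
    # updates usage; a decorator serializes the whole call the same way.
    print('claiming resources for %s' % instance_uuid)


if __name__ == '__main__':
    refresh_instance_cache('aae3b4a3-c954-4f73-bd12-9b19a675179c', [])
    claim_resources('aae3b4a3-c954-4f73-bd12-9b19a675179c')
```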
[ 1385.991599] env[62508]: DEBUG nova.compute.manager [req-0d5c2b2e-b004-407f-b3af-ccab15ae48bd req-17b09425-9e27-4447-b763-a6e6b07d2599 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Received event network-changed-39708f16-52cb-48ca-b3e5-da465cea2af7 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1385.991599] env[62508]: DEBUG nova.compute.manager [req-0d5c2b2e-b004-407f-b3af-ccab15ae48bd req-17b09425-9e27-4447-b763-a6e6b07d2599 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Refreshing instance network info cache due to event network-changed-39708f16-52cb-48ca-b3e5-da465cea2af7. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1385.991599] env[62508]: DEBUG oslo_concurrency.lockutils [req-0d5c2b2e-b004-407f-b3af-ccab15ae48bd req-17b09425-9e27-4447-b763-a6e6b07d2599 service nova] Acquiring lock "refresh_cache-aae3b4a3-c954-4f73-bd12-9b19a675179c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1386.066068] env[62508]: DEBUG nova.compute.manager [req-94ceecb7-4b2f-4bc3-87d9-686c2d098693 req-0ca18ca7-b828-48b4-b77c-448e4ebe862c service nova] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Received event network-vif-deleted-d19e2f57-30f3-41cd-b87b-d8378a2a8cc0 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1386.120041] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775535, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.581123} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.120589] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] ce74cbd8-b709-418b-a206-f51975fd0af1/ce74cbd8-b709-418b-a206-f51975fd0af1.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1386.121359] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1386.124976] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-44e144fd-61d1-434b-b484-725e818d0a64 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.137454] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775538, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.139049] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for the task: (returnval){ [ 1386.139049] env[62508]: value = "task-1775542" [ 1386.139049] env[62508]: _type = "Task" [ 1386.139049] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.160285] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1386.160285] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1386.160407] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Deleting the datastore file [datastore1] 03552483-a365-4d25-94bc-ea9b38ee6cd6 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1386.160669] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775542, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.161083] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d8ec1c9-8205-4aef-9b4b-7c422d6e88d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.168151] env[62508]: DEBUG oslo_vmware.api [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for the task: (returnval){ [ 1386.168151] env[62508]: value = "task-1775543" [ 1386.168151] env[62508]: _type = "Task" [ 1386.168151] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.180673] env[62508]: DEBUG oslo_vmware.api [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775543, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.247352] env[62508]: DEBUG oslo_vmware.api [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775540, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.407311} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.247616] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1386.247811] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1386.248109] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1386.248379] env[62508]: INFO nova.compute.manager [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1386.248687] env[62508]: DEBUG oslo.service.loopingcall [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1386.248904] env[62508]: DEBUG nova.compute.manager [-] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1386.249029] env[62508]: DEBUG nova.network.neutron [-] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1386.312363] env[62508]: DEBUG nova.compute.utils [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1386.317652] env[62508]: DEBUG nova.compute.manager [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1386.317758] env[62508]: DEBUG nova.network.neutron [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1386.348599] env[62508]: DEBUG nova.compute.manager [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1386.351256] env[62508]: INFO nova.compute.manager [-] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Took 1.39 seconds to deallocate network for instance. [ 1386.354758] env[62508]: DEBUG nova.network.neutron [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Updating instance_info_cache with network_info: [{"id": "93c4291d-0197-4fb0-9a10-3b95a4a17e60", "address": "fa:16:3e:4d:29:e5", "network": {"id": "de1b13e7-78cd-44cc-b552-21148d683df7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1661675640", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.163", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae1e8f147289480aa4ecab1500a0e3cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93c4291d-01", "ovs_interfaceid": "93c4291d-0197-4fb0-9a10-3b95a4a17e60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fbbd6f2c-037f-4a48-ad02-f3292bd93802", "address": "fa:16:3e:24:54:4a", "network": {"id": "1925999e-2666-44aa-b3fa-d3c9e6d5e772", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-469447563", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae1e8f147289480aa4ecab1500a0e3cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbbd6f2c-03", "ovs_interfaceid": "fbbd6f2c-037f-4a48-ad02-f3292bd93802", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "39708f16-52cb-48ca-b3e5-da465cea2af7", "address": "fa:16:3e:17:96:a0", "network": {"id": "de1b13e7-78cd-44cc-b552-21148d683df7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1661675640", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.108", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae1e8f147289480aa4ecab1500a0e3cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39708f16-52", "ovs_interfaceid": "39708f16-52cb-48ca-b3e5-da465cea2af7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1386.430211] env[62508]: DEBUG nova.policy [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8c1c06958b6473a99b192ea353596d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62c36aa1e15f4bfc83e5a9e5ce22d7d5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1386.637691] env[62508]: DEBUG oslo_vmware.api [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775538, 'name': PowerOnVM_Task, 'duration_secs': 0.62859} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.639828] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1386.643148] env[62508]: DEBUG nova.compute.manager [None req-b432de5f-473d-4bfd-8778-0e8c71d74836 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1386.643148] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af291680-3dae-4907-b600-ea2ddb46c6aa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.661487] env[62508]: DEBUG oslo_concurrency.lockutils [None req-787f6d34-f1c7-4904-ae1e-b626223c1daa tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Acquiring lock "2f7b7109-0ced-4ea4-8dde-608655f2b3ab" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.661762] env[62508]: DEBUG oslo_concurrency.lockutils [None req-787f6d34-f1c7-4904-ae1e-b626223c1daa tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Lock "2f7b7109-0ced-4ea4-8dde-608655f2b3ab" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.662029] env[62508]: INFO nova.compute.manager [None req-787f6d34-f1c7-4904-ae1e-b626223c1daa tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Rebooting instance [ 1386.664384] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775542, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080723} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.668021] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1386.668021] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ebf0e57-aeb3-4077-9cc2-4b95babc4e63 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.700725] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] ce74cbd8-b709-418b-a206-f51975fd0af1/ce74cbd8-b709-418b-a206-f51975fd0af1.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1386.711736] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6621f66-35d1-473a-8b16-4234f6883b52 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.728673] env[62508]: DEBUG oslo_vmware.api [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775543, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.735917] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for the task: (returnval){ [ 1386.735917] env[62508]: value = "task-1775544" [ 1386.735917] env[62508]: _type = "Task" [ 1386.735917] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.744027] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775544, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.823931] env[62508]: DEBUG nova.compute.manager [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1386.860392] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Releasing lock "refresh_cache-aae3b4a3-c954-4f73-bd12-9b19a675179c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1386.861140] env[62508]: DEBUG nova.compute.manager [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Instance network_info: |[{"id": "93c4291d-0197-4fb0-9a10-3b95a4a17e60", "address": "fa:16:3e:4d:29:e5", "network": {"id": "de1b13e7-78cd-44cc-b552-21148d683df7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1661675640", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.163", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae1e8f147289480aa4ecab1500a0e3cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93c4291d-01", "ovs_interfaceid": "93c4291d-0197-4fb0-9a10-3b95a4a17e60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fbbd6f2c-037f-4a48-ad02-f3292bd93802", "address": "fa:16:3e:24:54:4a", "network": {"id": "1925999e-2666-44aa-b3fa-d3c9e6d5e772", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-469447563", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae1e8f147289480aa4ecab1500a0e3cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbbd6f2c-03", "ovs_interfaceid": "fbbd6f2c-037f-4a48-ad02-f3292bd93802", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "39708f16-52cb-48ca-b3e5-da465cea2af7", "address": "fa:16:3e:17:96:a0", "network": {"id": "de1b13e7-78cd-44cc-b552-21148d683df7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1661675640", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.108", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae1e8f147289480aa4ecab1500a0e3cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39708f16-52", "ovs_interfaceid": "39708f16-52cb-48ca-b3e5-da465cea2af7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1386.862028] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.864288] env[62508]: DEBUG oslo_concurrency.lockutils [req-0d5c2b2e-b004-407f-b3af-ccab15ae48bd req-17b09425-9e27-4447-b763-a6e6b07d2599 service nova] Acquired lock "refresh_cache-aae3b4a3-c954-4f73-bd12-9b19a675179c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1386.864288] env[62508]: DEBUG nova.network.neutron [req-0d5c2b2e-b004-407f-b3af-ccab15ae48bd req-17b09425-9e27-4447-b763-a6e6b07d2599 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Refreshing network info cache for port 39708f16-52cb-48ca-b3e5-da465cea2af7 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1386.864288] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:29:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afae6acd-1873-4228-9d5a-1cd5d4efe3e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '93c4291d-0197-4fb0-9a10-3b95a4a17e60', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:54:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89f807d9-140f-4a6f-8bce-96795f9482ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fbbd6f2c-037f-4a48-ad02-f3292bd93802', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:96:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afae6acd-1873-4228-9d5a-1cd5d4efe3e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '39708f16-52cb-48ca-b3e5-da465cea2af7', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1386.884139] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Creating folder: Project (ae1e8f147289480aa4ecab1500a0e3cf). Parent ref: group-v368536. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1386.892902] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.894781] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d04e2f11-7144-4179-891f-2e11691a121d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.899300] env[62508]: DEBUG nova.network.neutron [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Successfully created port: 43717415-1abf-4418-8d1c-38df0c6f0ea9 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1386.911199] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Created folder: Project (ae1e8f147289480aa4ecab1500a0e3cf) in parent group-v368536. [ 1386.911293] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Creating folder: Instances. Parent ref: group-v368609. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1386.911562] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-526ecb12-4db7-45ae-b58d-dd14ea747e21 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.922230] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Created folder: Instances in parent group-v368609. [ 1386.922489] env[62508]: DEBUG oslo.service.loopingcall [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1386.922945] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1386.922945] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-17aa89f7-01fb-4904-86a6-4d6131fdf9d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.952646] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1386.952646] env[62508]: value = "task-1775547" [ 1386.952646] env[62508]: _type = "Task" [ 1386.952646] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.961558] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775547, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.963558] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfea3f4-361a-498f-8b6b-f19085f647f3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.970481] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c02364c3-9c56-4b21-9619-2aa5d401f0d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.005705] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85e6366-a775-4308-a6e0-b62540f29988 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.012666] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db6dda1-541c-4548-b568-4d27b50590bd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.030184] env[62508]: DEBUG nova.compute.provider_tree [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1387.107442] env[62508]: DEBUG nova.network.neutron [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Successfully updated port: f6601a11-9230-42a6-969e-6d69816e0f4b {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1387.214476] env[62508]: DEBUG oslo_vmware.api [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Task: {'id': task-1775543, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.628981} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.215812] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1387.215812] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1387.215812] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1387.215812] env[62508]: INFO nova.compute.manager [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Took 1.78 seconds to destroy the instance on the hypervisor. [ 1387.215812] env[62508]: DEBUG oslo.service.loopingcall [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1387.215812] env[62508]: DEBUG nova.compute.manager [-] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1387.215812] env[62508]: DEBUG nova.network.neutron [-] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1387.228887] env[62508]: DEBUG oslo_concurrency.lockutils [None req-787f6d34-f1c7-4904-ae1e-b626223c1daa tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Acquiring lock "refresh_cache-2f7b7109-0ced-4ea4-8dde-608655f2b3ab" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.228887] env[62508]: DEBUG oslo_concurrency.lockutils [None req-787f6d34-f1c7-4904-ae1e-b626223c1daa tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Acquired lock "refresh_cache-2f7b7109-0ced-4ea4-8dde-608655f2b3ab" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.228887] env[62508]: DEBUG nova.network.neutron [None req-787f6d34-f1c7-4904-ae1e-b626223c1daa tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1387.249794] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775544, 'name': ReconfigVM_Task, 'duration_secs': 0.295736} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.250154] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Reconfigured VM instance instance-0000001b to attach disk [datastore1] ce74cbd8-b709-418b-a206-f51975fd0af1/ce74cbd8-b709-418b-a206-f51975fd0af1.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1387.250932] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-41ba8234-51e1-4d06-b10a-b27f5bdbd92a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.258360] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for the task: (returnval){ [ 1387.258360] env[62508]: value = "task-1775548" [ 1387.258360] env[62508]: _type = "Task" [ 1387.258360] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.267594] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775548, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.453736] env[62508]: DEBUG nova.network.neutron [req-0d5c2b2e-b004-407f-b3af-ccab15ae48bd req-17b09425-9e27-4447-b763-a6e6b07d2599 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Updated VIF entry in instance network info cache for port 39708f16-52cb-48ca-b3e5-da465cea2af7. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1387.454233] env[62508]: DEBUG nova.network.neutron [req-0d5c2b2e-b004-407f-b3af-ccab15ae48bd req-17b09425-9e27-4447-b763-a6e6b07d2599 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Updating instance_info_cache with network_info: [{"id": "93c4291d-0197-4fb0-9a10-3b95a4a17e60", "address": "fa:16:3e:4d:29:e5", "network": {"id": "de1b13e7-78cd-44cc-b552-21148d683df7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1661675640", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.163", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae1e8f147289480aa4ecab1500a0e3cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93c4291d-01", "ovs_interfaceid": "93c4291d-0197-4fb0-9a10-3b95a4a17e60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fbbd6f2c-037f-4a48-ad02-f3292bd93802", "address": "fa:16:3e:24:54:4a", "network": {"id": "1925999e-2666-44aa-b3fa-d3c9e6d5e772", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-469447563", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae1e8f147289480aa4ecab1500a0e3cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbbd6f2c-03", "ovs_interfaceid": "fbbd6f2c-037f-4a48-ad02-f3292bd93802", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "39708f16-52cb-48ca-b3e5-da465cea2af7", "address": "fa:16:3e:17:96:a0", "network": {"id": 
"de1b13e7-78cd-44cc-b552-21148d683df7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1661675640", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.108", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae1e8f147289480aa4ecab1500a0e3cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39708f16-52", "ovs_interfaceid": "39708f16-52cb-48ca-b3e5-da465cea2af7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1387.467120] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775547, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.508666] env[62508]: DEBUG nova.network.neutron [-] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1387.537117] env[62508]: DEBUG nova.scheduler.client.report [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1387.612855] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Acquiring lock "refresh_cache-a239d78f-085a-4e5c-924d-cf338298fa73" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.613121] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Acquired lock "refresh_cache-a239d78f-085a-4e5c-924d-cf338298fa73" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.613255] env[62508]: DEBUG nova.network.neutron [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Building network info cache for instance {{(pid=62508) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1387.769573] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775548, 'name': Rename_Task, 'duration_secs': 0.141601} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.769850] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1387.772305] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-10f39a5f-b084-4e6d-ab57-d337e6d10636 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.779490] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for the task: (returnval){ [ 1387.779490] env[62508]: value = "task-1775549" [ 1387.779490] env[62508]: _type = "Task" [ 1387.779490] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.787910] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775549, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.835951] env[62508]: DEBUG nova.compute.manager [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1387.864684] env[62508]: DEBUG nova.virt.hardware [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1387.865228] env[62508]: DEBUG nova.virt.hardware [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1387.865228] env[62508]: DEBUG nova.virt.hardware [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1387.865528] env[62508]: DEBUG nova.virt.hardware [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1387.865528] env[62508]: DEBUG nova.virt.hardware [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1387.865628] env[62508]: DEBUG nova.virt.hardware [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1387.865754] env[62508]: DEBUG nova.virt.hardware [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1387.865920] env[62508]: DEBUG nova.virt.hardware [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1387.866239] env[62508]: DEBUG nova.virt.hardware [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1387.866387] env[62508]: DEBUG nova.virt.hardware [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1387.866564] env[62508]: DEBUG nova.virt.hardware [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1387.867427] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7135b0f8-df50-4d94-9f8d-9612a1ff6d96 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.878198] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79eec72-e232-4fe5-a309-10a64489747b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.960661] env[62508]: DEBUG oslo_concurrency.lockutils [req-0d5c2b2e-b004-407f-b3af-ccab15ae48bd req-17b09425-9e27-4447-b763-a6e6b07d2599 service nova] Releasing lock "refresh_cache-aae3b4a3-c954-4f73-bd12-9b19a675179c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1387.967169] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775547, 'name': CreateVM_Task, 'duration_secs': 0.656756} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.967356] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1387.968264] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.968436] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.968771] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1387.969078] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e44c510a-216e-408c-a4eb-5b325eae4dfc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.975100] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for the task: (returnval){ [ 1387.975100] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b355c1-aefb-c41a-8cc7-143da302e03b" [ 1387.975100] env[62508]: _type = "Task" [ 1387.975100] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.984593] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b355c1-aefb-c41a-8cc7-143da302e03b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.011401] env[62508]: INFO nova.compute.manager [-] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Took 1.76 seconds to deallocate network for instance. 
[ 1388.043454] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.236s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.046796] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.025s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1388.048466] env[62508]: INFO nova.compute.claims [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1388.095173] env[62508]: INFO nova.scheduler.client.report [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Deleted allocations for instance ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd [ 1388.118708] env[62508]: DEBUG nova.network.neutron [None req-787f6d34-f1c7-4904-ae1e-b626223c1daa tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Updating instance_info_cache with network_info: [{"id": "214565f6-9a69-416b-9a71-6b98dcdf82a2", "address": "fa:16:3e:53:8f:78", "network": {"id": "6de1c787-ad5d-4886-aba8-aef3c43f9b0e", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-288908091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "259e35282a6d43778c432bcce94bd21d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap214565f6-9a", "ovs_interfaceid": "214565f6-9a69-416b-9a71-6b98dcdf82a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1388.165657] env[62508]: DEBUG nova.network.neutron [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1388.237147] env[62508]: DEBUG nova.network.neutron [-] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1388.292478] env[62508]: DEBUG oslo_vmware.api [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775549, 'name': PowerOnVM_Task, 'duration_secs': 0.492188} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.292710] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1388.292929] env[62508]: INFO nova.compute.manager [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Took 7.87 seconds to spawn the instance on the hypervisor. [ 1388.293113] env[62508]: DEBUG nova.compute.manager [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1388.293916] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8540d1-d827-45d8-94ae-02b4fcd50b64 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.367324] env[62508]: DEBUG nova.network.neutron [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Updating instance_info_cache with network_info: [{"id": "f6601a11-9230-42a6-969e-6d69816e0f4b", "address": "fa:16:3e:e0:4a:ac", "network": {"id": "6a3cd021-14c3-4ac5-b179-c0741b502c9a", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1400392957-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96ce65b1fa5f4a2aad45957e276145ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f1b07b1-e4e5-4842-9090-07fb2c3e124b", "external-id": "nsx-vlan-transportzone-646", "segmentation_id": 646, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6601a11-92", "ovs_interfaceid": "f6601a11-9230-42a6-969e-6d69816e0f4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1388.460352] env[62508]: DEBUG nova.compute.manager [req-293c129c-854c-414b-a40a-9910e505a41e req-ee8b81c3-9da1-413e-9f3b-bb0168f2963a service nova] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Received event network-vif-plugged-f6601a11-9230-42a6-969e-6d69816e0f4b {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1388.460583] env[62508]: DEBUG oslo_concurrency.lockutils [req-293c129c-854c-414b-a40a-9910e505a41e req-ee8b81c3-9da1-413e-9f3b-bb0168f2963a service nova] Acquiring lock "a239d78f-085a-4e5c-924d-cf338298fa73-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1388.460819] env[62508]: DEBUG oslo_concurrency.lockutils [req-293c129c-854c-414b-a40a-9910e505a41e req-ee8b81c3-9da1-413e-9f3b-bb0168f2963a service nova] Lock "a239d78f-085a-4e5c-924d-cf338298fa73-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1388.460968] env[62508]: DEBUG oslo_concurrency.lockutils [req-293c129c-854c-414b-a40a-9910e505a41e req-ee8b81c3-9da1-413e-9f3b-bb0168f2963a service nova] Lock "a239d78f-085a-4e5c-924d-cf338298fa73-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.461164] env[62508]: DEBUG nova.compute.manager [req-293c129c-854c-414b-a40a-9910e505a41e req-ee8b81c3-9da1-413e-9f3b-bb0168f2963a service nova] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] No waiting events found dispatching network-vif-plugged-f6601a11-9230-42a6-969e-6d69816e0f4b {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1388.461331] env[62508]: WARNING nova.compute.manager [req-293c129c-854c-414b-a40a-9910e505a41e req-ee8b81c3-9da1-413e-9f3b-bb0168f2963a service nova] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Received unexpected event network-vif-plugged-f6601a11-9230-42a6-969e-6d69816e0f4b for instance with vm_state building and task_state spawning. [ 1388.461490] env[62508]: DEBUG nova.compute.manager [req-293c129c-854c-414b-a40a-9910e505a41e req-ee8b81c3-9da1-413e-9f3b-bb0168f2963a service nova] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Received event network-vif-deleted-09f33fb7-0c57-46d2-b4ba-1ebe2b45daa7 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1388.461653] env[62508]: DEBUG nova.compute.manager [req-293c129c-854c-414b-a40a-9910e505a41e req-ee8b81c3-9da1-413e-9f3b-bb0168f2963a service nova] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Received event network-changed-f6601a11-9230-42a6-969e-6d69816e0f4b {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1388.461802] env[62508]: DEBUG nova.compute.manager [req-293c129c-854c-414b-a40a-9910e505a41e req-ee8b81c3-9da1-413e-9f3b-bb0168f2963a service nova] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Refreshing instance network info cache due to event network-changed-f6601a11-9230-42a6-969e-6d69816e0f4b. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1388.461962] env[62508]: DEBUG oslo_concurrency.lockutils [req-293c129c-854c-414b-a40a-9910e505a41e req-ee8b81c3-9da1-413e-9f3b-bb0168f2963a service nova] Acquiring lock "refresh_cache-a239d78f-085a-4e5c-924d-cf338298fa73" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1388.489080] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b355c1-aefb-c41a-8cc7-143da302e03b, 'name': SearchDatastore_Task, 'duration_secs': 0.009935} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.489483] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.489730] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1388.490084] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1388.490256] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1388.490520] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1388.490794] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eea5e064-f59f-4858-b6d6-1360e22a7662 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.505526] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1388.505734] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None 
req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1388.506502] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e5d0220-2cc0-48da-9e3c-eb9a7125e42d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.516850] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for the task: (returnval){ [ 1388.516850] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52924ef3-b389-9d67-a863-d74551598d22" [ 1388.516850] env[62508]: _type = "Task" [ 1388.516850] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.523147] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1388.528945] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52924ef3-b389-9d67-a863-d74551598d22, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.604408] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3bfad8d7-a319-4b6a-906a-8c02419f8d23 tempest-FloatingIPsAssociationNegativeTestJSON-369724345 tempest-FloatingIPsAssociationNegativeTestJSON-369724345-project-member] Lock "ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.588s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.620954] env[62508]: DEBUG oslo_concurrency.lockutils [None req-787f6d34-f1c7-4904-ae1e-b626223c1daa tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Releasing lock "refresh_cache-2f7b7109-0ced-4ea4-8dde-608655f2b3ab" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.628199] env[62508]: DEBUG nova.compute.manager [None req-787f6d34-f1c7-4904-ae1e-b626223c1daa tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1388.629115] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "63fca45d-5922-4a14-9936-30070c349f8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1388.629529] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lock "63fca45d-5922-4a14-9936-30070c349f8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1388.634221] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3113d84f-37c4-4cb6-b434-e5d41c4924e0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.741324] env[62508]: INFO nova.compute.manager [-] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Took 1.52 seconds to deallocate network for instance. [ 1388.813338] env[62508]: INFO nova.compute.manager [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Took 50.39 seconds to build instance. 
[ 1388.872602] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Releasing lock "refresh_cache-a239d78f-085a-4e5c-924d-cf338298fa73" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.876065] env[62508]: DEBUG nova.compute.manager [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Instance network_info: |[{"id": "f6601a11-9230-42a6-969e-6d69816e0f4b", "address": "fa:16:3e:e0:4a:ac", "network": {"id": "6a3cd021-14c3-4ac5-b179-c0741b502c9a", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1400392957-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96ce65b1fa5f4a2aad45957e276145ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f1b07b1-e4e5-4842-9090-07fb2c3e124b", "external-id": "nsx-vlan-transportzone-646", "segmentation_id": 646, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6601a11-92", "ovs_interfaceid": "f6601a11-9230-42a6-969e-6d69816e0f4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1388.876065] env[62508]: DEBUG oslo_concurrency.lockutils [req-293c129c-854c-414b-a40a-9910e505a41e req-ee8b81c3-9da1-413e-9f3b-bb0168f2963a service nova] Acquired lock "refresh_cache-a239d78f-085a-4e5c-924d-cf338298fa73" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1388.876065] env[62508]: DEBUG nova.network.neutron [req-293c129c-854c-414b-a40a-9910e505a41e req-ee8b81c3-9da1-413e-9f3b-bb0168f2963a service nova] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Refreshing network info cache for port f6601a11-9230-42a6-969e-6d69816e0f4b {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1388.876065] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:4a:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f1b07b1-e4e5-4842-9090-07fb2c3e124b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f6601a11-9230-42a6-969e-6d69816e0f4b', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1388.891926] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Creating folder: Project (96ce65b1fa5f4a2aad45957e276145ad). 
Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1388.897818] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8be4fbd-11ea-49c5-a4f4-21e943a564be {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.912881] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Created folder: Project (96ce65b1fa5f4a2aad45957e276145ad) in parent group-v368536. [ 1388.912881] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Creating folder: Instances. Parent ref: group-v368612. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1388.912881] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c14ef145-c32a-41e6-833d-d94ccd0fbdf6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.920519] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Created folder: Instances in parent group-v368612. [ 1388.921098] env[62508]: DEBUG oslo.service.loopingcall [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1388.921098] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1388.921221] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6defcf19-d9a1-4c85-8ea1-3284990789e5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.940408] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1388.940408] env[62508]: value = "task-1775552" [ 1388.940408] env[62508]: _type = "Task" [ 1388.940408] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.952477] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775552, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.975566] env[62508]: DEBUG nova.network.neutron [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Successfully updated port: 43717415-1abf-4418-8d1c-38df0c6f0ea9 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1389.029073] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52924ef3-b389-9d67-a863-d74551598d22, 'name': SearchDatastore_Task, 'duration_secs': 0.022561} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.033426] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b50aed6-3b8b-4c3e-90f3-d1c59541ebaa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.038882] env[62508]: DEBUG nova.compute.manager [req-90bf305d-5b84-4189-987d-ea8ccb6b168c req-d53379e7-546c-4e85-a362-600c231ffbe4 service nova] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Received event network-vif-plugged-43717415-1abf-4418-8d1c-38df0c6f0ea9 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1389.038956] env[62508]: DEBUG oslo_concurrency.lockutils [req-90bf305d-5b84-4189-987d-ea8ccb6b168c req-d53379e7-546c-4e85-a362-600c231ffbe4 service nova] Acquiring lock "ffe54977-81c4-4842-9773-eed704a53ada-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.039437] env[62508]: DEBUG oslo_concurrency.lockutils [req-90bf305d-5b84-4189-987d-ea8ccb6b168c req-d53379e7-546c-4e85-a362-600c231ffbe4 service nova] Lock "ffe54977-81c4-4842-9773-eed704a53ada-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.039610] env[62508]: DEBUG oslo_concurrency.lockutils [req-90bf305d-5b84-4189-987d-ea8ccb6b168c req-d53379e7-546c-4e85-a362-600c231ffbe4 service nova] Lock "ffe54977-81c4-4842-9773-eed704a53ada-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1389.039781] env[62508]: DEBUG nova.compute.manager [req-90bf305d-5b84-4189-987d-ea8ccb6b168c req-d53379e7-546c-4e85-a362-600c231ffbe4 service nova] [instance: ffe54977-81c4-4842-9773-eed704a53ada] No waiting events found dispatching network-vif-plugged-43717415-1abf-4418-8d1c-38df0c6f0ea9 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1389.040064] env[62508]: WARNING nova.compute.manager [req-90bf305d-5b84-4189-987d-ea8ccb6b168c req-d53379e7-546c-4e85-a362-600c231ffbe4 service nova] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Received unexpected event network-vif-plugged-43717415-1abf-4418-8d1c-38df0c6f0ea9 for instance with vm_state building and task_state spawning. 
[ 1389.046275] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for the task: (returnval){ [ 1389.046275] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52237fbf-feb5-9ea1-4b1c-ab70e1193148" [ 1389.046275] env[62508]: _type = "Task" [ 1389.046275] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.066647] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52237fbf-feb5-9ea1-4b1c-ab70e1193148, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.238490] env[62508]: DEBUG nova.network.neutron [req-293c129c-854c-414b-a40a-9910e505a41e req-ee8b81c3-9da1-413e-9f3b-bb0168f2963a service nova] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Updated VIF entry in instance network info cache for port f6601a11-9230-42a6-969e-6d69816e0f4b. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1389.238882] env[62508]: DEBUG nova.network.neutron [req-293c129c-854c-414b-a40a-9910e505a41e req-ee8b81c3-9da1-413e-9f3b-bb0168f2963a service nova] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Updating instance_info_cache with network_info: [{"id": "f6601a11-9230-42a6-969e-6d69816e0f4b", "address": "fa:16:3e:e0:4a:ac", "network": {"id": "6a3cd021-14c3-4ac5-b179-c0741b502c9a", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1400392957-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96ce65b1fa5f4a2aad45957e276145ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f1b07b1-e4e5-4842-9090-07fb2c3e124b", "external-id": "nsx-vlan-transportzone-646", "segmentation_id": 646, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6601a11-92", "ovs_interfaceid": "f6601a11-9230-42a6-969e-6d69816e0f4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1389.247440] env[62508]: DEBUG oslo_concurrency.lockutils [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.317768] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1754bb73-8ff1-4dfb-aae3-5ac14e4ef00d tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Lock "ce74cbd8-b709-418b-a206-f51975fd0af1" 
"released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.561s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1389.455101] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775552, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.483746] env[62508]: DEBUG oslo_concurrency.lockutils [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquiring lock "refresh_cache-ffe54977-81c4-4842-9773-eed704a53ada" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1389.483896] env[62508]: DEBUG oslo_concurrency.lockutils [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquired lock "refresh_cache-ffe54977-81c4-4842-9773-eed704a53ada" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.484055] env[62508]: DEBUG nova.network.neutron [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1389.558713] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52237fbf-feb5-9ea1-4b1c-ab70e1193148, 'name': SearchDatastore_Task, 'duration_secs': 0.016912} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.558972] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1389.559247] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] aae3b4a3-c954-4f73-bd12-9b19a675179c/aae3b4a3-c954-4f73-bd12-9b19a675179c.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1389.559809] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-08eb9de3-78e9-4d9a-8998-ebfc8ab2d0c2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.565705] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b32c9cf-2f91-43c4-89be-d9b147bf58d1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.569644] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for the task: (returnval){ [ 1389.569644] env[62508]: value = "task-1775553" [ 1389.569644] env[62508]: _type = "Task" [ 1389.569644] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.576574] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c54bf154-31ca-45fc-83c8-fa46529c2022 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.583382] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775553, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.615378] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d43e160-032f-4fbd-a6ce-01bea0575f3c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.623800] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39f9eb5-a46a-4b54-a749-a04db4d4ce68 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.646153] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "9a3ef326-0fbf-4fd2-bb5e-3009bf661381" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.646402] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "9a3ef326-0fbf-4fd2-bb5e-3009bf661381" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.646813] env[62508]: DEBUG nova.compute.provider_tree [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1389.654510] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f9d033-1ad0-41cf-b1cb-3c8a00e9009e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.662779] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-787f6d34-f1c7-4904-ae1e-b626223c1daa tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Doing hard reboot of VM {{(pid=62508) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1389.663831] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-d985efe7-8618-4c93-951d-faea420ffd8a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.671174] env[62508]: DEBUG oslo_vmware.api [None req-787f6d34-f1c7-4904-ae1e-b626223c1daa tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Waiting for the task: (returnval){ [ 1389.671174] env[62508]: value = "task-1775554" [ 1389.671174] env[62508]: _type = "Task" [ 1389.671174] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.680038] env[62508]: DEBUG oslo_vmware.api [None req-787f6d34-f1c7-4904-ae1e-b626223c1daa tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': task-1775554, 'name': ResetVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.743188] env[62508]: DEBUG oslo_concurrency.lockutils [req-293c129c-854c-414b-a40a-9910e505a41e req-ee8b81c3-9da1-413e-9f3b-bb0168f2963a service nova] Releasing lock "refresh_cache-a239d78f-085a-4e5c-924d-cf338298fa73" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1389.743480] env[62508]: DEBUG nova.compute.manager [req-293c129c-854c-414b-a40a-9910e505a41e req-ee8b81c3-9da1-413e-9f3b-bb0168f2963a service nova] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Received event network-vif-deleted-2fb8da4e-64be-4341-9f85-2c75e0c67dfb {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1389.819143] env[62508]: DEBUG nova.compute.manager [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1389.954017] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775552, 'name': CreateVM_Task, 'duration_secs': 0.669047} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.954392] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1389.955037] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1389.955219] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.955585] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1389.956122] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccc306fb-dc25-44cb-8743-75c2358f209f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.961767] env[62508]: DEBUG oslo_vmware.api [None 
req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Waiting for the task: (returnval){ [ 1389.961767] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5286b6c0-9e40-694e-012f-02bfeab0253c" [ 1389.961767] env[62508]: _type = "Task" [ 1389.961767] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.971883] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5286b6c0-9e40-694e-012f-02bfeab0253c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.029019] env[62508]: DEBUG nova.network.neutron [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1390.082316] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775553, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.150389] env[62508]: DEBUG nova.scheduler.client.report [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1390.183102] env[62508]: DEBUG oslo_vmware.api [None req-787f6d34-f1c7-4904-ae1e-b626223c1daa tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': task-1775554, 'name': ResetVM_Task, 'duration_secs': 0.098133} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.183683] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-787f6d34-f1c7-4904-ae1e-b626223c1daa tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Did hard reboot of VM {{(pid=62508) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1390.183683] env[62508]: DEBUG nova.compute.manager [None req-787f6d34-f1c7-4904-ae1e-b626223c1daa tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1390.184471] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25898ad-aba3-4db8-9c96-bc4aa67c8566 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.282926] env[62508]: DEBUG nova.network.neutron [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Updating instance_info_cache with network_info: [{"id": "43717415-1abf-4418-8d1c-38df0c6f0ea9", "address": "fa:16:3e:fd:e7:ee", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.27", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43717415-1a", "ovs_interfaceid": "43717415-1abf-4418-8d1c-38df0c6f0ea9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1390.345392] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.472387] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5286b6c0-9e40-694e-012f-02bfeab0253c, 'name': SearchDatastore_Task, 'duration_secs': 0.063632} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.472697] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.472926] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1390.473226] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.473397] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.473623] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1390.473821] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58b6c8ce-506e-415d-8b66-6d574be7c438 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.485866] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1390.486070] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1390.486802] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d56cc9dc-9a4f-4fd0-bbfa-c80baf5e56bc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.493723] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Waiting for the task: (returnval){ [ 1390.493723] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b104a3-7b2f-aa49-8400-f9e11e439b64" [ 1390.493723] env[62508]: _type = "Task" [ 1390.493723] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.500880] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b104a3-7b2f-aa49-8400-f9e11e439b64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.560040] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquiring lock "ce74cbd8-b709-418b-a206-f51975fd0af1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.560295] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Lock "ce74cbd8-b709-418b-a206-f51975fd0af1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.560500] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquiring lock "ce74cbd8-b709-418b-a206-f51975fd0af1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.560679] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Lock "ce74cbd8-b709-418b-a206-f51975fd0af1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.560845] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Lock "ce74cbd8-b709-418b-a206-f51975fd0af1-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.565439] env[62508]: INFO nova.compute.manager [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Terminating instance [ 1390.569011] env[62508]: DEBUG nova.compute.manager [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1390.569222] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1390.570053] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9774fc88-3133-487f-af2d-2457b4f5a5ab {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.580505] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1390.583083] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5b62f50d-e425-4f32-8a89-9f39947dbd7b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.584473] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775553, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.824857} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.584700] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] aae3b4a3-c954-4f73-bd12-9b19a675179c/aae3b4a3-c954-4f73-bd12-9b19a675179c.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1390.585276] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1390.585386] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0615dc89-6aae-43fb-ad2c-54781d363120 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.590755] env[62508]: DEBUG oslo_vmware.api [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for the task: (returnval){ [ 1390.590755] env[62508]: value = "task-1775555" [ 1390.590755] env[62508]: _type = "Task" [ 1390.590755] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.591922] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for the task: (returnval){ [ 1390.591922] env[62508]: value = "task-1775556" [ 1390.591922] env[62508]: _type = "Task" [ 1390.591922] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.602231] env[62508]: DEBUG oslo_vmware.api [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775555, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.605835] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775556, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.655717] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.609s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.656328] env[62508]: DEBUG nova.compute.manager [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1390.658970] env[62508]: DEBUG oslo_concurrency.lockutils [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 26.949s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.700223] env[62508]: DEBUG oslo_concurrency.lockutils [None req-787f6d34-f1c7-4904-ae1e-b626223c1daa tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Lock "2f7b7109-0ced-4ea4-8dde-608655f2b3ab" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.038s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.789147] env[62508]: DEBUG oslo_concurrency.lockutils [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Releasing lock "refresh_cache-ffe54977-81c4-4842-9773-eed704a53ada" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.789147] env[62508]: DEBUG nova.compute.manager [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Instance network_info: |[{"id": "43717415-1abf-4418-8d1c-38df0c6f0ea9", "address": "fa:16:3e:fd:e7:ee", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.27", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43717415-1a", "ovs_interfaceid": "43717415-1abf-4418-8d1c-38df0c6f0ea9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1390.789147] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:e7:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '43717415-1abf-4418-8d1c-38df0c6f0ea9', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1390.804230] env[62508]: DEBUG oslo.service.loopingcall [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1390.804661] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1390.804772] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ff85f27a-a22e-423b-9a44-b7a863a65835 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.828178] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1390.828178] env[62508]: value = "task-1775557" [ 1390.828178] env[62508]: _type = "Task" [ 1390.828178] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.836041] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775557, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.006404] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b104a3-7b2f-aa49-8400-f9e11e439b64, 'name': SearchDatastore_Task, 'duration_secs': 0.072672} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.007366] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b71608e8-65ef-4076-a8b8-c0c247da456d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.013394] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Waiting for the task: (returnval){ [ 1391.013394] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5291510e-b1dd-a73b-bfc0-2af2432ae354" [ 1391.013394] env[62508]: _type = "Task" [ 1391.013394] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.024018] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5291510e-b1dd-a73b-bfc0-2af2432ae354, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.104013] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775556, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.210986} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.107023] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1391.107374] env[62508]: DEBUG oslo_vmware.api [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775555, 'name': PowerOffVM_Task, 'duration_secs': 0.233712} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.108071] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea02d08c-97a8-47d9-9383-cc0faa4de799 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.110570] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1391.110742] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1391.111033] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a99f69e8-48d4-4c63-bfff-07ad4fed4121 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.152030] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] aae3b4a3-c954-4f73-bd12-9b19a675179c/aae3b4a3-c954-4f73-bd12-9b19a675179c.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1391.152030] env[62508]: DEBUG oslo_vmware.service 
[-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3be7109c-dbfc-48e0-93d2-6e06e13524b0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.169796] env[62508]: DEBUG nova.compute.utils [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1391.174183] env[62508]: DEBUG nova.compute.manager [req-d1c6fc1e-fdcb-4540-b452-8b6a4b6efe03 req-7f99abd2-0aa6-41c0-9bd0-f96d22431d79 service nova] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Received event network-changed-43717415-1abf-4418-8d1c-38df0c6f0ea9 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1391.174387] env[62508]: DEBUG nova.compute.manager [req-d1c6fc1e-fdcb-4540-b452-8b6a4b6efe03 req-7f99abd2-0aa6-41c0-9bd0-f96d22431d79 service nova] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Refreshing instance network info cache due to event network-changed-43717415-1abf-4418-8d1c-38df0c6f0ea9. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1391.174651] env[62508]: DEBUG oslo_concurrency.lockutils [req-d1c6fc1e-fdcb-4540-b452-8b6a4b6efe03 req-7f99abd2-0aa6-41c0-9bd0-f96d22431d79 service nova] Acquiring lock "refresh_cache-ffe54977-81c4-4842-9773-eed704a53ada" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1391.174781] env[62508]: DEBUG oslo_concurrency.lockutils [req-d1c6fc1e-fdcb-4540-b452-8b6a4b6efe03 req-7f99abd2-0aa6-41c0-9bd0-f96d22431d79 service nova] Acquired lock "refresh_cache-ffe54977-81c4-4842-9773-eed704a53ada" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.175093] env[62508]: DEBUG nova.network.neutron [req-d1c6fc1e-fdcb-4540-b452-8b6a4b6efe03 req-7f99abd2-0aa6-41c0-9bd0-f96d22431d79 service nova] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Refreshing network info cache for port 43717415-1abf-4418-8d1c-38df0c6f0ea9 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1391.176357] env[62508]: DEBUG nova.compute.manager [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1391.176509] env[62508]: DEBUG nova.network.neutron [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1391.186401] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for the task: (returnval){ [ 1391.186401] env[62508]: value = "task-1775559" [ 1391.186401] env[62508]: _type = "Task" [ 1391.186401] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.195051] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775559, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.215628] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Acquiring lock "2f7b7109-0ced-4ea4-8dde-608655f2b3ab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1391.215879] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Lock "2f7b7109-0ced-4ea4-8dde-608655f2b3ab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.216103] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Acquiring lock "2f7b7109-0ced-4ea4-8dde-608655f2b3ab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1391.216290] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Lock "2f7b7109-0ced-4ea4-8dde-608655f2b3ab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.216458] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Lock "2f7b7109-0ced-4ea4-8dde-608655f2b3ab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1391.219071] env[62508]: INFO nova.compute.manager [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Terminating instance [ 1391.221807] env[62508]: DEBUG nova.compute.manager [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1391.222034] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1391.222906] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed5ec93-bf34-428e-a545-9139a9945e13 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.233802] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1391.234296] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-095872b3-d8cb-4607-83fc-640cb8fdbf7e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.240641] env[62508]: DEBUG oslo_vmware.api [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Waiting for the task: (returnval){ [ 1391.240641] env[62508]: value = "task-1775560" [ 1391.240641] env[62508]: _type = "Task" [ 1391.240641] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.245333] env[62508]: DEBUG nova.policy [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef2703b6313a4097873da475599d5739', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8ab9b261e66e453b88cdd769dca47239', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1391.252591] env[62508]: DEBUG oslo_vmware.api [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': task-1775560, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.345347] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775557, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.524616] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5291510e-b1dd-a73b-bfc0-2af2432ae354, 'name': SearchDatastore_Task, 'duration_secs': 0.043029} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.524877] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1391.525306] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a239d78f-085a-4e5c-924d-cf338298fa73/a239d78f-085a-4e5c-924d-cf338298fa73.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1391.525571] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f8fb5745-745f-46aa-8324-990ca62adf7e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.535161] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Waiting for the task: (returnval){ [ 1391.535161] env[62508]: value = "task-1775561" [ 1391.535161] env[62508]: _type = "Task" [ 1391.535161] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.542845] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': task-1775561, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.587048] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1391.587336] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1391.587622] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Deleting the datastore file [datastore1] ce74cbd8-b709-418b-a206-f51975fd0af1 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1391.588058] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5e20105-7a31-442f-af07-947be64c4fe6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.596024] env[62508]: DEBUG oslo_vmware.api [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for the task: (returnval){ [ 1391.596024] env[62508]: value = "task-1775562" [ 1391.596024] env[62508]: _type = "Task" [ 1391.596024] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.605199] env[62508]: DEBUG oslo_vmware.api [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775562, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.672913] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-234fc4bc-d74e-41c4-b494-a1a4009670e0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.677444] env[62508]: DEBUG nova.compute.manager [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1391.685858] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a84d762-67f6-4b21-8508-9443793658d1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.699950] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775559, 'name': ReconfigVM_Task, 'duration_secs': 0.475281} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.728676] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Reconfigured VM instance instance-0000001a to attach disk [datastore1] aae3b4a3-c954-4f73-bd12-9b19a675179c/aae3b4a3-c954-4f73-bd12-9b19a675179c.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1391.731637] env[62508]: DEBUG nova.network.neutron [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Successfully created port: 61d5f183-d4da-47f0-814e-82621dbc6657 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1391.735464] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6efb419b-dba1-494c-932b-6c7ce3944b20 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.737684] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eda162a-16c8-4e25-9f48-a4413f07bded {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.748599] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for the task: (returnval){ [ 1391.748599] env[62508]: value = "task-1775563" [ 1391.748599] env[62508]: _type = "Task" [ 1391.748599] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.750189] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee99eb9-e621-40d4-a543-c7b4c521c0ba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.760512] env[62508]: DEBUG oslo_vmware.api [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': task-1775560, 'name': PowerOffVM_Task, 'duration_secs': 0.21593} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.761265] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1391.761450] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1391.762041] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b7e1d06-1285-40ea-a6fc-a66fb12da762 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.774791] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775563, 'name': Rename_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.775323] env[62508]: DEBUG nova.compute.provider_tree [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1391.840242] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775557, 'name': CreateVM_Task, 'duration_secs': 0.906718} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.840530] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1391.841321] env[62508]: DEBUG oslo_concurrency.lockutils [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1391.841494] env[62508]: DEBUG oslo_concurrency.lockutils [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.841853] env[62508]: DEBUG oslo_concurrency.lockutils [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1391.842479] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e83b2244-c439-4515-8b47-edc38da8a8f2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.849233] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for the task: (returnval){ [ 1391.849233] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5200ded7-2a4d-b180-3853-86abb08fdaa3" [ 1391.849233] env[62508]: _type = "Task" [ 1391.849233] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.861495] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5200ded7-2a4d-b180-3853-86abb08fdaa3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.944302] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1391.944528] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1391.944765] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Deleting the datastore file [datastore1] 2f7b7109-0ced-4ea4-8dde-608655f2b3ab {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1391.945436] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9bd9ed08-d47c-4693-bf60-2113808b13ab {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.952995] env[62508]: DEBUG oslo_vmware.api [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Waiting for the task: (returnval){ [ 1391.952995] env[62508]: value = "task-1775565" [ 1391.952995] env[62508]: _type = "Task" [ 1391.952995] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.965429] env[62508]: DEBUG oslo_vmware.api [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': task-1775565, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.057910] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': task-1775561, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.112980] env[62508]: DEBUG oslo_vmware.api [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Task: {'id': task-1775562, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.30016} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.113439] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1392.114173] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1392.114905] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1392.115189] env[62508]: INFO nova.compute.manager [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Took 1.55 seconds to destroy the instance on the hypervisor. [ 1392.117024] env[62508]: DEBUG oslo.service.loopingcall [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1392.117024] env[62508]: DEBUG nova.compute.manager [-] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1392.117024] env[62508]: DEBUG nova.network.neutron [-] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1392.200818] env[62508]: DEBUG nova.network.neutron [req-d1c6fc1e-fdcb-4540-b452-8b6a4b6efe03 req-7f99abd2-0aa6-41c0-9bd0-f96d22431d79 service nova] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Updated VIF entry in instance network info cache for port 43717415-1abf-4418-8d1c-38df0c6f0ea9. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1392.200818] env[62508]: DEBUG nova.network.neutron [req-d1c6fc1e-fdcb-4540-b452-8b6a4b6efe03 req-7f99abd2-0aa6-41c0-9bd0-f96d22431d79 service nova] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Updating instance_info_cache with network_info: [{"id": "43717415-1abf-4418-8d1c-38df0c6f0ea9", "address": "fa:16:3e:fd:e7:ee", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.27", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43717415-1a", "ovs_interfaceid": "43717415-1abf-4418-8d1c-38df0c6f0ea9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1392.264843] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775563, 'name': Rename_Task, 'duration_secs': 0.465125} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.265236] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1392.265556] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eb044dad-86ef-4020-9940-46a5f0726a73 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.275695] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for the task: (returnval){ [ 1392.275695] env[62508]: value = "task-1775566" [ 1392.275695] env[62508]: _type = "Task" [ 1392.275695] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.289711] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775566, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.319020] env[62508]: ERROR nova.scheduler.client.report [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [req-175c6fc1-27d1-4b65-9251-d028621dfd79] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-175c6fc1-27d1-4b65-9251-d028621dfd79"}]} [ 1392.343872] env[62508]: DEBUG nova.scheduler.client.report [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1392.364610] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5200ded7-2a4d-b180-3853-86abb08fdaa3, 'name': SearchDatastore_Task, 'duration_secs': 0.063556} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.365593] env[62508]: DEBUG oslo_concurrency.lockutils [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1392.365842] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1392.366085] env[62508]: DEBUG oslo_concurrency.lockutils [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1392.366239] env[62508]: DEBUG oslo_concurrency.lockutils [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.366420] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1392.366688] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-413d9d3d-ce68-40d1-b7ea-8c7e20873206 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.373097] env[62508]: DEBUG nova.scheduler.client.report [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1392.373296] env[62508]: DEBUG nova.compute.provider_tree [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1392.378340] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1392.378517] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1392.379375] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d72b4ee-4036-43f0-823c-0d6aa5806acd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.384508] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for the task: (returnval){ [ 1392.384508] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525801df-75c0-1d22-6ee6-3148ac155b60" [ 1392.384508] env[62508]: _type = "Task" [ 1392.384508] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.389865] env[62508]: DEBUG nova.scheduler.client.report [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1392.394892] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525801df-75c0-1d22-6ee6-3148ac155b60, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.417058] env[62508]: DEBUG nova.scheduler.client.report [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1392.466552] env[62508]: DEBUG oslo_vmware.api [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Task: {'id': task-1775565, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.295819} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.466643] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1392.466799] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1392.466975] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1392.467235] env[62508]: INFO nova.compute.manager [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1392.467501] env[62508]: DEBUG oslo.service.loopingcall [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1392.470648] env[62508]: DEBUG nova.compute.manager [-] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1392.470648] env[62508]: DEBUG nova.network.neutron [-] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1392.549346] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': task-1775561, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.593979} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.549609] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a239d78f-085a-4e5c-924d-cf338298fa73/a239d78f-085a-4e5c-924d-cf338298fa73.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1392.549849] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1392.550133] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5b0afee0-6369-4c07-815b-bf41cb9a57d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.557971] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Waiting for the task: (returnval){ [ 1392.557971] env[62508]: value = "task-1775567" [ 1392.557971] env[62508]: _type = "Task" [ 1392.557971] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.565821] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': task-1775567, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.597993] env[62508]: DEBUG nova.compute.manager [req-febd2d11-76e4-4fa5-86ee-97ad61358784 req-fc586d4f-a8ad-4dae-9e28-ef386f8f9501 service nova] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Received event network-vif-deleted-98a8b500-40eb-420e-8812-e8780d0c7c17 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1392.598231] env[62508]: INFO nova.compute.manager [req-febd2d11-76e4-4fa5-86ee-97ad61358784 req-fc586d4f-a8ad-4dae-9e28-ef386f8f9501 service nova] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Neutron deleted interface 98a8b500-40eb-420e-8812-e8780d0c7c17; detaching it from the instance and deleting it from the info cache [ 1392.598405] env[62508]: DEBUG nova.network.neutron [req-febd2d11-76e4-4fa5-86ee-97ad61358784 req-fc586d4f-a8ad-4dae-9e28-ef386f8f9501 service nova] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1392.692544] env[62508]: DEBUG nova.compute.manager [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1392.711277] env[62508]: DEBUG oslo_concurrency.lockutils [req-d1c6fc1e-fdcb-4540-b452-8b6a4b6efe03 req-7f99abd2-0aa6-41c0-9bd0-f96d22431d79 service nova] Releasing lock "refresh_cache-ffe54977-81c4-4842-9773-eed704a53ada" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1392.720257] env[62508]: DEBUG nova.virt.hardware [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1392.720519] env[62508]: DEBUG nova.virt.hardware [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1392.720704] env[62508]: DEBUG nova.virt.hardware [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1392.720910] env[62508]: 
DEBUG nova.virt.hardware [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1392.721095] env[62508]: DEBUG nova.virt.hardware [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1392.721256] env[62508]: DEBUG nova.virt.hardware [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1392.721463] env[62508]: DEBUG nova.virt.hardware [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1392.721619] env[62508]: DEBUG nova.virt.hardware [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1392.721818] env[62508]: DEBUG nova.virt.hardware [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1392.721994] env[62508]: DEBUG nova.virt.hardware [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1392.722183] env[62508]: DEBUG nova.virt.hardware [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1392.723088] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c5d656-a2a3-4b77-9fc0-7c4e8e2ba71c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.737843] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3740bd34-8bc9-4225-ae26-27962f760471 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.794108] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775566, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.900669] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525801df-75c0-1d22-6ee6-3148ac155b60, 'name': SearchDatastore_Task, 'duration_secs': 0.009184} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.903909] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78856ca8-6688-4a71-a7e0-a02ebb075631 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.907244] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for the task: (returnval){ [ 1392.907244] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e9d41c-a004-191c-f7ec-d8813c1a8f91" [ 1392.907244] env[62508]: _type = "Task" [ 1392.907244] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.919644] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e9d41c-a004-191c-f7ec-d8813c1a8f91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.923213] env[62508]: DEBUG nova.network.neutron [-] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1392.979206] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-636972ee-d9a9-4b3d-8d51-80be81284f99 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.988071] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e48be6e0-4bbc-4c4f-a425-574ebf1ce874 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.021909] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff940e00-03d9-4801-be7d-2ad1f2c2a6aa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.030194] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b551794b-6663-4172-af90-a462e9c8185c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.043930] env[62508]: DEBUG nova.compute.provider_tree [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1393.068000] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': task-1775567, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070032} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.068399] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1393.069125] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-550e16c9-ceb4-45a1-b9c8-01ad75de724f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.099148] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] a239d78f-085a-4e5c-924d-cf338298fa73/a239d78f-085a-4e5c-924d-cf338298fa73.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1393.099422] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a7f6d0f-a93c-4f08-b06c-b34eb6345451 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.114117] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-99b8d526-60c3-45a1-b23e-4eeb1d0fd663 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.125059] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb7b03d-2936-435b-a931-a3b32ef0a086 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.135756] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Waiting for the task: (returnval){ [ 1393.135756] env[62508]: value = "task-1775568" [ 1393.135756] env[62508]: _type = "Task" [ 1393.135756] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.156931] env[62508]: DEBUG nova.compute.manager [req-febd2d11-76e4-4fa5-86ee-97ad61358784 req-fc586d4f-a8ad-4dae-9e28-ef386f8f9501 service nova] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Detach interface failed, port_id=98a8b500-40eb-420e-8812-e8780d0c7c17, reason: Instance ce74cbd8-b709-418b-a206-f51975fd0af1 could not be found. 
{{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1393.160538] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': task-1775568, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.273962] env[62508]: DEBUG nova.network.neutron [-] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.291971] env[62508]: DEBUG oslo_vmware.api [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775566, 'name': PowerOnVM_Task, 'duration_secs': 0.522505} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.292870] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1393.293097] env[62508]: INFO nova.compute.manager [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Took 16.80 seconds to spawn the instance on the hypervisor. [ 1393.293283] env[62508]: DEBUG nova.compute.manager [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1393.294441] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144425f7-8763-4486-a3f1-b4fefe10e231 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.418258] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e9d41c-a004-191c-f7ec-d8813c1a8f91, 'name': SearchDatastore_Task, 'duration_secs': 0.042121} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.418559] env[62508]: DEBUG oslo_concurrency.lockutils [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1393.418817] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] ffe54977-81c4-4842-9773-eed704a53ada/ffe54977-81c4-4842-9773-eed704a53ada.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1393.419160] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e6e38309-9568-40a7-ac9f-cb9f7a1214f0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.426604] env[62508]: INFO nova.compute.manager [-] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Took 1.31 seconds to deallocate network for instance. [ 1393.426946] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for the task: (returnval){ [ 1393.426946] env[62508]: value = "task-1775569" [ 1393.426946] env[62508]: _type = "Task" [ 1393.426946] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.442362] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775569, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.548390] env[62508]: DEBUG nova.scheduler.client.report [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1393.649909] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': task-1775568, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.702323] env[62508]: DEBUG nova.compute.manager [req-7f3f04ea-5de5-4adb-be91-6e415f217f45 req-5a9dc40e-907e-4051-b410-55b978d35037 service nova] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Received event network-vif-plugged-61d5f183-d4da-47f0-814e-82621dbc6657 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1393.702480] env[62508]: DEBUG oslo_concurrency.lockutils [req-7f3f04ea-5de5-4adb-be91-6e415f217f45 req-5a9dc40e-907e-4051-b410-55b978d35037 service nova] Acquiring lock "18ee140a-97bd-439a-8027-0dd0a1f0a6e2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.702749] env[62508]: DEBUG oslo_concurrency.lockutils [req-7f3f04ea-5de5-4adb-be91-6e415f217f45 req-5a9dc40e-907e-4051-b410-55b978d35037 service nova] Lock "18ee140a-97bd-439a-8027-0dd0a1f0a6e2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.702963] env[62508]: DEBUG oslo_concurrency.lockutils [req-7f3f04ea-5de5-4adb-be91-6e415f217f45 req-5a9dc40e-907e-4051-b410-55b978d35037 service nova] Lock "18ee140a-97bd-439a-8027-0dd0a1f0a6e2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1393.703282] env[62508]: DEBUG nova.compute.manager [req-7f3f04ea-5de5-4adb-be91-6e415f217f45 req-5a9dc40e-907e-4051-b410-55b978d35037 service nova] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] No waiting events found dispatching network-vif-plugged-61d5f183-d4da-47f0-814e-82621dbc6657 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1393.703465] env[62508]: WARNING nova.compute.manager [req-7f3f04ea-5de5-4adb-be91-6e415f217f45 req-5a9dc40e-907e-4051-b410-55b978d35037 service nova] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Received unexpected event network-vif-plugged-61d5f183-d4da-47f0-814e-82621dbc6657 for instance with vm_state building and task_state spawning. [ 1393.750860] env[62508]: DEBUG nova.network.neutron [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Successfully updated port: 61d5f183-d4da-47f0-814e-82621dbc6657 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1393.776481] env[62508]: INFO nova.compute.manager [-] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Took 1.31 seconds to deallocate network for instance. [ 1393.814550] env[62508]: INFO nova.compute.manager [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Took 58.36 seconds to build instance. 
[ 1393.937603] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.941941] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775569, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.151852] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': task-1775568, 'name': ReconfigVM_Task, 'duration_secs': 0.711889} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.152800] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Reconfigured VM instance instance-0000001c to attach disk [datastore1] a239d78f-085a-4e5c-924d-cf338298fa73/a239d78f-085a-4e5c-924d-cf338298fa73.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1394.153854] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-82b214ff-fe68-419f-a8f3-e3f97397f623 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.161811] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Waiting for the task: (returnval){ [ 1394.161811] env[62508]: value = "task-1775570" [ 1394.161811] env[62508]: _type = "Task" [ 1394.161811] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.170137] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': task-1775570, 'name': Rename_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.254499] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Acquiring lock "refresh_cache-18ee140a-97bd-439a-8027-0dd0a1f0a6e2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1394.254499] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Acquired lock "refresh_cache-18ee140a-97bd-439a-8027-0dd0a1f0a6e2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.254499] env[62508]: DEBUG nova.network.neutron [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1394.283658] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.288248] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquiring lock "aae3b4a3-c954-4f73-bd12-9b19a675179c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.317119] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a039ff86-e3a9-46aa-9d39-301b85c42873 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Lock "aae3b4a3-c954-4f73-bd12-9b19a675179c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.920s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.318614] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Lock "aae3b4a3-c954-4f73-bd12-9b19a675179c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.030s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.318838] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquiring lock "aae3b4a3-c954-4f73-bd12-9b19a675179c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.319069] env[62508]: DEBUG oslo_concurrency.lockutils [None 
req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Lock "aae3b4a3-c954-4f73-bd12-9b19a675179c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.319268] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Lock "aae3b4a3-c954-4f73-bd12-9b19a675179c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.322078] env[62508]: INFO nova.compute.manager [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Terminating instance [ 1394.323233] env[62508]: DEBUG nova.compute.manager [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1394.323487] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1394.324760] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c1af532-fdeb-48ab-8d7e-3dc8d1424770 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.333080] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1394.333323] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fdb16f35-0e8b-43ed-a6ad-7388efd5483a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.339372] env[62508]: DEBUG oslo_vmware.api [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for the task: (returnval){ [ 1394.339372] env[62508]: value = "task-1775571" [ 1394.339372] env[62508]: _type = "Task" [ 1394.339372] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.347742] env[62508]: DEBUG oslo_vmware.api [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775571, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.442509] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775569, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.638391} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.442779] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] ffe54977-81c4-4842-9773-eed704a53ada/ffe54977-81c4-4842-9773-eed704a53ada.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1394.442964] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1394.443225] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-73b9983a-7c5c-4e4b-a90d-844a18884e22 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.449253] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for the task: (returnval){ [ 1394.449253] env[62508]: value = "task-1775572" [ 1394.449253] env[62508]: _type = "Task" [ 1394.449253] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.456713] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775572, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.560927] env[62508]: DEBUG oslo_concurrency.lockutils [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.902s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.564167] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.487s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.565731] env[62508]: INFO nova.compute.claims [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1394.623270] env[62508]: DEBUG nova.compute.manager [req-7431d476-8b30-43e6-9238-f3b53c04efc0 req-4c8e5200-7675-4a6c-abdb-e2ff779e56d4 service nova] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Received event network-vif-deleted-214565f6-9a69-416b-9a71-6b98dcdf82a2 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1394.672639] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': task-1775570, 'name': Rename_Task, 'duration_secs': 0.176087} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.672950] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1394.673263] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a519eb59-5ea6-49f8-8a18-1a46d6990c96 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.679925] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Waiting for the task: (returnval){ [ 1394.679925] env[62508]: value = "task-1775573" [ 1394.679925] env[62508]: _type = "Task" [ 1394.679925] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.690190] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': task-1775573, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.786294] env[62508]: DEBUG nova.network.neutron [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1394.822377] env[62508]: DEBUG nova.compute.manager [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1394.849744] env[62508]: DEBUG oslo_vmware.api [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775571, 'name': PowerOffVM_Task, 'duration_secs': 0.188081} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.849744] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1394.849906] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1394.850172] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24ddfcc8-910f-4a3d-abe6-476e01fa53f1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.933168] env[62508]: DEBUG nova.network.neutron [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Updating instance_info_cache with network_info: [{"id": "61d5f183-d4da-47f0-814e-82621dbc6657", "address": "fa:16:3e:c0:34:dd", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61d5f183-d4", "ovs_interfaceid": "61d5f183-d4da-47f0-814e-82621dbc6657", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1394.960546] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775572, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070023} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.960796] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1394.961869] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856230c0-20c4-4691-a9e1-ca2217172560 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.987725] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] ffe54977-81c4-4842-9773-eed704a53ada/ffe54977-81c4-4842-9773-eed704a53ada.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1394.988078] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9d46e6a-8911-428c-ad30-808dab8ade58 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.008552] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for the task: (returnval){ [ 1395.008552] env[62508]: value = "task-1775575" [ 1395.008552] env[62508]: _type = "Task" [ 1395.008552] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.017920] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775575, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.105949] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1395.106196] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1395.106385] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Deleting the datastore file [datastore1] aae3b4a3-c954-4f73-bd12-9b19a675179c {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1395.106729] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-10fe260a-88ee-491c-a6a6-0613c40533cf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.114372] env[62508]: DEBUG oslo_vmware.api [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for the task: (returnval){ [ 1395.114372] env[62508]: value = "task-1775576" [ 1395.114372] env[62508]: _type = "Task" [ 1395.114372] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.122943] env[62508]: DEBUG oslo_vmware.api [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775576, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.148760] env[62508]: INFO nova.scheduler.client.report [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Deleted allocation for migration 9db522bb-cefe-493d-ba92-c293c83e3634 [ 1395.191141] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': task-1775573, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.436712] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.437276] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Releasing lock "refresh_cache-18ee140a-97bd-439a-8027-0dd0a1f0a6e2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1395.437605] env[62508]: DEBUG nova.compute.manager [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Instance network_info: |[{"id": "61d5f183-d4da-47f0-814e-82621dbc6657", "address": "fa:16:3e:c0:34:dd", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61d5f183-d4", "ovs_interfaceid": "61d5f183-d4da-47f0-814e-82621dbc6657", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1395.438016] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:34:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61d5f183-d4da-47f0-814e-82621dbc6657', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1395.445905] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Creating folder: Project (8ab9b261e66e453b88cdd769dca47239). Parent ref: group-v368536. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1395.446338] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e3a6a0a-a9a5-46c9-a817-15a9bcea7e72 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.457088] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Created folder: Project (8ab9b261e66e453b88cdd769dca47239) in parent group-v368536. [ 1395.457088] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Creating folder: Instances. Parent ref: group-v368616. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1395.457189] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-25b99d81-3a1e-4d4b-b9d5-1d9be696c79f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.467190] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Created folder: Instances in parent group-v368616. [ 1395.467459] env[62508]: DEBUG oslo.service.loopingcall [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1395.467659] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1395.467867] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9182bda8-5a3d-448c-9d03-09be822e4dbc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.488162] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1395.488162] env[62508]: value = "task-1775579" [ 1395.488162] env[62508]: _type = "Task" [ 1395.488162] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.496221] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775579, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.518641] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775575, 'name': ReconfigVM_Task, 'duration_secs': 0.35167} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.519050] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Reconfigured VM instance instance-0000001d to attach disk [datastore1] ffe54977-81c4-4842-9773-eed704a53ada/ffe54977-81c4-4842-9773-eed704a53ada.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1395.519697] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-83984853-648d-4180-abd0-cb011c9208bc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.525933] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for the task: (returnval){ [ 1395.525933] env[62508]: value = "task-1775580" [ 1395.525933] env[62508]: _type = "Task" [ 1395.525933] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.541486] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775580, 'name': Rename_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.627960] env[62508]: DEBUG oslo_vmware.api [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775576, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.183245} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.629661] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1395.630281] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1395.630502] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1395.630688] env[62508]: INFO nova.compute.manager [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Took 1.31 seconds to destroy the instance on the hypervisor. 
[ 1395.630931] env[62508]: DEBUG oslo.service.loopingcall [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1395.631180] env[62508]: DEBUG nova.compute.manager [-] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1395.631282] env[62508]: DEBUG nova.network.neutron [-] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1395.657643] env[62508]: DEBUG oslo_concurrency.lockutils [None req-157c237e-e27b-4a0f-ad64-6a236bcb01d5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "868cf942-f348-488d-b00a-af4c8b5efda5" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 35.537s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1395.695080] env[62508]: DEBUG oslo_vmware.api [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': task-1775573, 'name': PowerOnVM_Task, 'duration_secs': 0.784545} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.695588] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1395.695786] env[62508]: INFO nova.compute.manager [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Took 10.57 seconds to spawn the instance on the hypervisor. 
[ 1395.695969] env[62508]: DEBUG nova.compute.manager [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1395.696784] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe9f3bd-b41d-42e8-9e16-c790018906c6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.775326] env[62508]: DEBUG nova.compute.manager [req-5e8ebb3b-fe60-48f4-9411-d512727a3604 req-f32ee6d8-02cd-456c-9283-dc2a4f47b42c service nova] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Received event network-changed-61d5f183-d4da-47f0-814e-82621dbc6657 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1395.775445] env[62508]: DEBUG nova.compute.manager [req-5e8ebb3b-fe60-48f4-9411-d512727a3604 req-f32ee6d8-02cd-456c-9283-dc2a4f47b42c service nova] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Refreshing instance network info cache due to event network-changed-61d5f183-d4da-47f0-814e-82621dbc6657. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1395.775658] env[62508]: DEBUG oslo_concurrency.lockutils [req-5e8ebb3b-fe60-48f4-9411-d512727a3604 req-f32ee6d8-02cd-456c-9283-dc2a4f47b42c service nova] Acquiring lock "refresh_cache-18ee140a-97bd-439a-8027-0dd0a1f0a6e2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1395.775805] env[62508]: DEBUG oslo_concurrency.lockutils [req-5e8ebb3b-fe60-48f4-9411-d512727a3604 req-f32ee6d8-02cd-456c-9283-dc2a4f47b42c service nova] Acquired lock "refresh_cache-18ee140a-97bd-439a-8027-0dd0a1f0a6e2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.775964] env[62508]: DEBUG nova.network.neutron [req-5e8ebb3b-fe60-48f4-9411-d512727a3604 req-f32ee6d8-02cd-456c-9283-dc2a4f47b42c service nova] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Refreshing network info cache for port 61d5f183-d4da-47f0-814e-82621dbc6657 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1396.002206] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775579, 'name': CreateVM_Task, 'duration_secs': 0.412989} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.002206] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1396.003020] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1396.003339] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.003636] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1396.003899] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce57b505-caa2-4152-8ee0-ff7337ee7098 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.009081] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Waiting for the task: (returnval){ [ 1396.009081] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520f0a5b-663a-2268-b23b-7fa92995a1e9" [ 1396.009081] env[62508]: _type = "Task" [ 1396.009081] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.019219] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520f0a5b-663a-2268-b23b-7fa92995a1e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.041803] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775580, 'name': Rename_Task, 'duration_secs': 0.187111} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.042140] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1396.042764] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f6e39ce-575b-4df3-972f-1f1623a95770 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.051280] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for the task: (returnval){ [ 1396.051280] env[62508]: value = "task-1775581" [ 1396.051280] env[62508]: _type = "Task" [ 1396.051280] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.060650] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef0489e-a103-489e-bb6e-8bf5349678ad {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.067926] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775581, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.073068] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcdd73a0-7d12-4aa2-a1d7-f3f1ce7d4f24 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.119746] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad333b53-6236-408a-ae45-2d02b9fd35bc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.128897] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4406255b-170c-4b6b-ab00-5ecbf4bc4814 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.145062] env[62508]: DEBUG nova.compute.provider_tree [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1396.215870] env[62508]: INFO nova.compute.manager [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Took 50.55 seconds to build instance. 
[ 1396.524945] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520f0a5b-663a-2268-b23b-7fa92995a1e9, 'name': SearchDatastore_Task, 'duration_secs': 0.017893} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.525432] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1396.525775] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1396.526135] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1396.526135] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.526352] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1396.526529] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8415c765-8013-4520-8102-e6028bbdac0a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.537690] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1396.537927] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1396.541309] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2f91191-7b86-4fd4-9457-3d92041a9780 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.548357] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Waiting for the task: (returnval){ [ 1396.548357] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52726409-4d8f-7a47-514e-e282b6222108" [ 1396.548357] env[62508]: _type = "Task" [ 1396.548357] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.557761] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52726409-4d8f-7a47-514e-e282b6222108, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.563551] env[62508]: DEBUG oslo_vmware.api [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775581, 'name': PowerOnVM_Task, 'duration_secs': 0.484515} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.563551] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1396.566214] env[62508]: INFO nova.compute.manager [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Took 8.73 seconds to spawn the instance on the hypervisor. [ 1396.566446] env[62508]: DEBUG nova.compute.manager [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1396.567235] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38707987-4198-49c0-a239-5bfe7d3339fa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.605076] env[62508]: DEBUG nova.network.neutron [req-5e8ebb3b-fe60-48f4-9411-d512727a3604 req-f32ee6d8-02cd-456c-9283-dc2a4f47b42c service nova] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Updated VIF entry in instance network info cache for port 61d5f183-d4da-47f0-814e-82621dbc6657. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1396.605468] env[62508]: DEBUG nova.network.neutron [req-5e8ebb3b-fe60-48f4-9411-d512727a3604 req-f32ee6d8-02cd-456c-9283-dc2a4f47b42c service nova] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Updating instance_info_cache with network_info: [{"id": "61d5f183-d4da-47f0-814e-82621dbc6657", "address": "fa:16:3e:c0:34:dd", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61d5f183-d4", "ovs_interfaceid": "61d5f183-d4da-47f0-814e-82621dbc6657", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1396.649317] env[62508]: DEBUG nova.scheduler.client.report [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1396.721839] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6139d89d-6642-46ea-b59e-d3706c495fb5 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Lock "a239d78f-085a-4e5c-924d-cf338298fa73" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.129s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.061106] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52726409-4d8f-7a47-514e-e282b6222108, 'name': SearchDatastore_Task, 'duration_secs': 0.009691} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.061985] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4c4b075-af5b-4a61-925f-f9236c45f584 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.067527] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Waiting for the task: (returnval){ [ 1397.067527] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525d96b7-f64c-ecfb-c93c-7edc97c7607e" [ 1397.067527] env[62508]: _type = "Task" [ 1397.067527] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.075637] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525d96b7-f64c-ecfb-c93c-7edc97c7607e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.083418] env[62508]: INFO nova.compute.manager [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Took 45.45 seconds to build instance. [ 1397.107873] env[62508]: DEBUG oslo_concurrency.lockutils [req-5e8ebb3b-fe60-48f4-9411-d512727a3604 req-f32ee6d8-02cd-456c-9283-dc2a4f47b42c service nova] Releasing lock "refresh_cache-18ee140a-97bd-439a-8027-0dd0a1f0a6e2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1397.156211] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.592s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.156847] env[62508]: DEBUG nova.compute.manager [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1397.159357] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.257s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.160823] env[62508]: INFO nova.compute.claims [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1397.184669] env[62508]: DEBUG nova.network.neutron [-] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.224094] env[62508]: DEBUG nova.compute.manager [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1397.395045] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Acquiring lock "a239d78f-085a-4e5c-924d-cf338298fa73" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.395045] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Lock "a239d78f-085a-4e5c-924d-cf338298fa73" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.395045] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Acquiring lock "a239d78f-085a-4e5c-924d-cf338298fa73-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.395233] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Lock "a239d78f-085a-4e5c-924d-cf338298fa73-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.395289] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Lock "a239d78f-085a-4e5c-924d-cf338298fa73-events" "released" 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.398907] env[62508]: INFO nova.compute.manager [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Terminating instance [ 1397.399915] env[62508]: DEBUG nova.compute.manager [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1397.400217] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1397.401040] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efda6811-5fcf-4d58-9a99-1e1f8a58bb2e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.410511] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1397.411319] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ac63ab6-4302-4e3e-a0db-cc643f8c579d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.418424] env[62508]: DEBUG oslo_vmware.api [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Waiting for the task: (returnval){ [ 1397.418424] env[62508]: value = "task-1775582" [ 1397.418424] env[62508]: _type = "Task" [ 1397.418424] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.427041] env[62508]: DEBUG oslo_vmware.api [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': task-1775582, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.577929] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525d96b7-f64c-ecfb-c93c-7edc97c7607e, 'name': SearchDatastore_Task, 'duration_secs': 0.015858} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.578294] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1397.578638] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 18ee140a-97bd-439a-8027-0dd0a1f0a6e2/18ee140a-97bd-439a-8027-0dd0a1f0a6e2.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1397.578981] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-42e65fda-b0e7-4267-9e48-ded848b677ce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.585756] env[62508]: DEBUG oslo_concurrency.lockutils [None req-43cdd5bc-9fcc-42bc-af9e-2f3cd7793999 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lock "ffe54977-81c4-4842-9773-eed704a53ada" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.275s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.586073] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Waiting for the task: (returnval){ [ 1397.586073] env[62508]: value = "task-1775583" [ 1397.586073] env[62508]: _type = "Task" [ 1397.586073] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.595894] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': task-1775583, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.665132] env[62508]: DEBUG nova.compute.utils [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1397.668691] env[62508]: DEBUG nova.compute.manager [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1397.668872] env[62508]: DEBUG nova.network.neutron [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1397.687323] env[62508]: INFO nova.compute.manager [-] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Took 2.06 seconds to deallocate network for instance. [ 1397.735210] env[62508]: DEBUG nova.policy [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64a1365de2b7431ebbc9b5066dd0f974', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a34358e29dde46139ee4aa5c8f57d0d0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1397.742160] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.844141] env[62508]: DEBUG nova.compute.manager [req-a076dbe2-1da6-4496-ba74-e2fa1ea7b091 req-c756335b-2cd8-4742-ace7-656363baed50 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Received event network-vif-deleted-39708f16-52cb-48ca-b3e5-da465cea2af7 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1397.844456] env[62508]: DEBUG nova.compute.manager [req-a076dbe2-1da6-4496-ba74-e2fa1ea7b091 req-c756335b-2cd8-4742-ace7-656363baed50 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Received event network-vif-deleted-fbbd6f2c-037f-4a48-ad02-f3292bd93802 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1397.844750] env[62508]: DEBUG nova.compute.manager [req-a076dbe2-1da6-4496-ba74-e2fa1ea7b091 req-c756335b-2cd8-4742-ace7-656363baed50 service nova] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Received event network-vif-deleted-93c4291d-0197-4fb0-9a10-3b95a4a17e60 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1397.848771] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.849485] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.931191] env[62508]: DEBUG oslo_vmware.api [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': task-1775582, 'name': PowerOffVM_Task, 'duration_secs': 0.203592} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.931509] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1397.931745] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1397.932044] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-84c8d34b-a53c-4b62-9db8-2c857a51bb25 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.011615] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fed3ad9-7490-453d-baec-852f6a80be3d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.019595] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1398.019978] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1398.020379] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Deleting the datastore file [datastore1] a239d78f-085a-4e5c-924d-cf338298fa73 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1398.021486] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-120938a3-90ba-4b25-898d-c2dc7b554572 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.027608] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4f69a955-358d-423e-9b06-9d77742997cb tempest-ServersAdminNegativeTestJSON-1576797419 tempest-ServersAdminNegativeTestJSON-1576797419-project-admin] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Suspending the VM 
{{(pid=62508) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1398.029195] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-ad0f7646-ef51-4e7a-a674-093eb72bad3d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.032773] env[62508]: DEBUG oslo_vmware.api [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Waiting for the task: (returnval){ [ 1398.032773] env[62508]: value = "task-1775585" [ 1398.032773] env[62508]: _type = "Task" [ 1398.032773] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.036847] env[62508]: DEBUG oslo_vmware.api [None req-4f69a955-358d-423e-9b06-9d77742997cb tempest-ServersAdminNegativeTestJSON-1576797419 tempest-ServersAdminNegativeTestJSON-1576797419-project-admin] Waiting for the task: (returnval){ [ 1398.036847] env[62508]: value = "task-1775586" [ 1398.036847] env[62508]: _type = "Task" [ 1398.036847] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.045268] env[62508]: DEBUG oslo_vmware.api [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': task-1775585, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.050848] env[62508]: DEBUG oslo_vmware.api [None req-4f69a955-358d-423e-9b06-9d77742997cb tempest-ServersAdminNegativeTestJSON-1576797419 tempest-ServersAdminNegativeTestJSON-1576797419-project-admin] Task: {'id': task-1775586, 'name': SuspendVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.094034] env[62508]: DEBUG nova.compute.manager [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1398.103928] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': task-1775583, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.129862] env[62508]: DEBUG nova.network.neutron [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Successfully created port: a4c6f41a-0293-43f8-a413-d6181d46187c {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1398.175019] env[62508]: DEBUG nova.compute.manager [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1398.194041] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.544833] env[62508]: DEBUG oslo_vmware.api [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Task: {'id': task-1775585, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.38221} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.547613] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1398.547856] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1398.548080] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1398.548263] env[62508]: INFO nova.compute.manager [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1398.548503] env[62508]: DEBUG oslo.service.loopingcall [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1398.548881] env[62508]: DEBUG nova.compute.manager [-] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1398.549000] env[62508]: DEBUG nova.network.neutron [-] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1398.553537] env[62508]: DEBUG oslo_vmware.api [None req-4f69a955-358d-423e-9b06-9d77742997cb tempest-ServersAdminNegativeTestJSON-1576797419 tempest-ServersAdminNegativeTestJSON-1576797419-project-admin] Task: {'id': task-1775586, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.606786] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': task-1775583, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.56795} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.608947] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 18ee140a-97bd-439a-8027-0dd0a1f0a6e2/18ee140a-97bd-439a-8027-0dd0a1f0a6e2.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1398.609241] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1398.610057] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37063932-0174-42c1-9d9c-c83deed2e922 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.613131] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bc511f1f-5667-4c47-9554-3b3911a2b8c2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.620800] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d4912d-1cdc-4b1b-a03a-cb786ea88e90 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.624356] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Waiting for the task: (returnval){ [ 1398.624356] env[62508]: value = "task-1775587" [ 1398.624356] env[62508]: _type = "Task" [ 1398.624356] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.625282] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.657731] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf726cf-1f0b-4109-8cb3-d2387829038d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.663901] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': task-1775587, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.670082] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9564dd-97a1-47e6-a2e7-32a1609ca3b5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.688808] env[62508]: DEBUG nova.compute.provider_tree [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1398.888861] env[62508]: DEBUG nova.compute.manager [req-2ea4e475-5bcd-442b-8eb0-38cb2660d156 req-30700833-2608-4e75-ab4d-3bd405d6b2e4 service nova] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Received event network-vif-deleted-f6601a11-9230-42a6-969e-6d69816e0f4b {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1398.889116] env[62508]: INFO nova.compute.manager [req-2ea4e475-5bcd-442b-8eb0-38cb2660d156 req-30700833-2608-4e75-ab4d-3bd405d6b2e4 service nova] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Neutron deleted interface f6601a11-9230-42a6-969e-6d69816e0f4b; detaching it from the instance and deleting it from the info cache [ 1398.889259] env[62508]: DEBUG nova.network.neutron [req-2ea4e475-5bcd-442b-8eb0-38cb2660d156 req-30700833-2608-4e75-ab4d-3bd405d6b2e4 service nova] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.047819] env[62508]: DEBUG oslo_vmware.api [None req-4f69a955-358d-423e-9b06-9d77742997cb tempest-ServersAdminNegativeTestJSON-1576797419 tempest-ServersAdminNegativeTestJSON-1576797419-project-admin] Task: {'id': task-1775586, 'name': SuspendVM_Task, 'duration_secs': 0.666685} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.048102] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4f69a955-358d-423e-9b06-9d77742997cb tempest-ServersAdminNegativeTestJSON-1576797419 tempest-ServersAdminNegativeTestJSON-1576797419-project-admin] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Suspended the VM {{(pid=62508) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1399.048290] env[62508]: DEBUG nova.compute.manager [None req-4f69a955-358d-423e-9b06-9d77742997cb tempest-ServersAdminNegativeTestJSON-1576797419 tempest-ServersAdminNegativeTestJSON-1576797419-project-admin] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1399.049060] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b4cc795-d836-43eb-ad76-fe4a9fc50776 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.134751] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': task-1775587, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079436} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.135054] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1399.135838] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f046c3-2304-45f4-8d47-dae77092e1e8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.158092] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 18ee140a-97bd-439a-8027-0dd0a1f0a6e2/18ee140a-97bd-439a-8027-0dd0a1f0a6e2.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1399.158882] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c2fa461-28be-476a-883b-f3b32ea72cdd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.178741] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Waiting for the task: (returnval){ [ 1399.178741] env[62508]: value = "task-1775588" [ 1399.178741] env[62508]: _type = "Task" [ 1399.178741] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.186587] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': task-1775588, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.194787] env[62508]: DEBUG nova.compute.manager [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1399.215381] env[62508]: ERROR nova.scheduler.client.report [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [req-b2dd42e9-37a3-4ad8-ae2f-1934ca211157] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b2dd42e9-37a3-4ad8-ae2f-1934ca211157"}]} [ 1399.221866] env[62508]: DEBUG nova.virt.hardware [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1399.222214] env[62508]: DEBUG nova.virt.hardware [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1399.222448] env[62508]: DEBUG nova.virt.hardware [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1399.222590] env[62508]: DEBUG 
nova.virt.hardware [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1399.222800] env[62508]: DEBUG nova.virt.hardware [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1399.222969] env[62508]: DEBUG nova.virt.hardware [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1399.223194] env[62508]: DEBUG nova.virt.hardware [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1399.223359] env[62508]: DEBUG nova.virt.hardware [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1399.223524] env[62508]: DEBUG nova.virt.hardware [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1399.223689] env[62508]: DEBUG nova.virt.hardware [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1399.223872] env[62508]: DEBUG nova.virt.hardware [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1399.225053] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4cdec8-2aa1-484f-ae1f-fafddc0ef10c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.231326] env[62508]: DEBUG nova.scheduler.client.report [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1399.236207] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-728f7cef-7dbf-4e64-8ca7-c08ea09bbeef {{(pid=62508) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.250120] env[62508]: DEBUG nova.scheduler.client.report [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1399.250335] env[62508]: DEBUG nova.compute.provider_tree [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1399.261593] env[62508]: DEBUG nova.scheduler.client.report [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1399.280097] env[62508]: DEBUG nova.scheduler.client.report [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1399.352393] env[62508]: DEBUG nova.network.neutron [-] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.391907] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-102cfb08-c749-4a3e-9ca2-8ebebb69d84a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.401598] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe955544-b221-4866-9cdb-0911eb39a77d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.440544] env[62508]: DEBUG nova.compute.manager [req-2ea4e475-5bcd-442b-8eb0-38cb2660d156 req-30700833-2608-4e75-ab4d-3bd405d6b2e4 service nova] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Detach interface 
failed, port_id=f6601a11-9230-42a6-969e-6d69816e0f4b, reason: Instance a239d78f-085a-4e5c-924d-cf338298fa73 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1399.689068] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': task-1775588, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.744254] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5170cbd5-7c2e-4ce0-aecc-92ab13cd0a93 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.751856] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3085a005-60ae-44b9-b1bb-331c94b726cb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.785549] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e6a954-5894-4b0d-814e-76be55baebd9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.793394] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e69e789-6b0a-4a72-92e3-4f11fd168dd7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.811506] env[62508]: DEBUG nova.compute.provider_tree [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1399.820664] env[62508]: DEBUG nova.network.neutron [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Successfully updated port: a4c6f41a-0293-43f8-a413-d6181d46187c {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1399.856028] env[62508]: INFO nova.compute.manager [-] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Took 1.31 seconds to deallocate network for instance. 
[ 1399.901626] env[62508]: DEBUG nova.compute.manager [req-d45704b8-458f-4813-b872-36d508aef544 req-4a484bc0-bc75-4f1c-bb46-937a52ea0362 service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Received event network-vif-plugged-a4c6f41a-0293-43f8-a413-d6181d46187c {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1399.901845] env[62508]: DEBUG oslo_concurrency.lockutils [req-d45704b8-458f-4813-b872-36d508aef544 req-4a484bc0-bc75-4f1c-bb46-937a52ea0362 service nova] Acquiring lock "84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.902064] env[62508]: DEBUG oslo_concurrency.lockutils [req-d45704b8-458f-4813-b872-36d508aef544 req-4a484bc0-bc75-4f1c-bb46-937a52ea0362 service nova] Lock "84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.902333] env[62508]: DEBUG oslo_concurrency.lockutils [req-d45704b8-458f-4813-b872-36d508aef544 req-4a484bc0-bc75-4f1c-bb46-937a52ea0362 service nova] Lock "84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.902507] env[62508]: DEBUG nova.compute.manager [req-d45704b8-458f-4813-b872-36d508aef544 req-4a484bc0-bc75-4f1c-bb46-937a52ea0362 service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] No waiting events found dispatching network-vif-plugged-a4c6f41a-0293-43f8-a413-d6181d46187c {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1399.902673] env[62508]: WARNING nova.compute.manager [req-d45704b8-458f-4813-b872-36d508aef544 req-4a484bc0-bc75-4f1c-bb46-937a52ea0362 service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Received unexpected event network-vif-plugged-a4c6f41a-0293-43f8-a413-d6181d46187c for instance with vm_state building and task_state spawning. [ 1399.902833] env[62508]: DEBUG nova.compute.manager [req-d45704b8-458f-4813-b872-36d508aef544 req-4a484bc0-bc75-4f1c-bb46-937a52ea0362 service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Received event network-changed-a4c6f41a-0293-43f8-a413-d6181d46187c {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1399.902983] env[62508]: DEBUG nova.compute.manager [req-d45704b8-458f-4813-b872-36d508aef544 req-4a484bc0-bc75-4f1c-bb46-937a52ea0362 service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Refreshing instance network info cache due to event network-changed-a4c6f41a-0293-43f8-a413-d6181d46187c. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1399.903193] env[62508]: DEBUG oslo_concurrency.lockutils [req-d45704b8-458f-4813-b872-36d508aef544 req-4a484bc0-bc75-4f1c-bb46-937a52ea0362 service nova] Acquiring lock "refresh_cache-84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1399.903329] env[62508]: DEBUG oslo_concurrency.lockutils [req-d45704b8-458f-4813-b872-36d508aef544 req-4a484bc0-bc75-4f1c-bb46-937a52ea0362 service nova] Acquired lock "refresh_cache-84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.903487] env[62508]: DEBUG nova.network.neutron [req-d45704b8-458f-4813-b872-36d508aef544 req-4a484bc0-bc75-4f1c-bb46-937a52ea0362 service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Refreshing network info cache for port a4c6f41a-0293-43f8-a413-d6181d46187c {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1400.190284] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': task-1775588, 'name': ReconfigVM_Task, 'duration_secs': 0.52179} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.190547] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 18ee140a-97bd-439a-8027-0dd0a1f0a6e2/18ee140a-97bd-439a-8027-0dd0a1f0a6e2.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1400.191117] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-24f85fce-e808-4462-922a-767d39546c2a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.198818] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Waiting for the task: (returnval){ [ 1400.198818] env[62508]: value = "task-1775589" [ 1400.198818] env[62508]: _type = "Task" [ 1400.198818] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.207164] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': task-1775589, 'name': Rename_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.323502] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquiring lock "refresh_cache-84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.333775] env[62508]: ERROR nova.scheduler.client.report [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [req-6b37aa4b-91c5-4d01-a04c-8a18010fa98c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6b37aa4b-91c5-4d01-a04c-8a18010fa98c"}]} [ 1400.350050] env[62508]: DEBUG nova.scheduler.client.report [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1400.361754] env[62508]: DEBUG nova.scheduler.client.report [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1400.362010] env[62508]: DEBUG nova.compute.provider_tree [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1400.364619] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 
tempest-ServerPasswordTestJSON-1117999681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1400.373436] env[62508]: DEBUG nova.scheduler.client.report [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1400.390966] env[62508]: DEBUG nova.scheduler.client.report [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1400.435350] env[62508]: DEBUG nova.network.neutron [req-d45704b8-458f-4813-b872-36d508aef544 req-4a484bc0-bc75-4f1c-bb46-937a52ea0362 service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1400.710155] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': task-1775589, 'name': Rename_Task, 'duration_secs': 0.129416} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.710514] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1400.710852] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ef67f8a7-8502-4385-95f1-679dc735803e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.716941] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Waiting for the task: (returnval){ [ 1400.716941] env[62508]: value = "task-1775590" [ 1400.716941] env[62508]: _type = "Task" [ 1400.716941] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.728071] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': task-1775590, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.733590] env[62508]: DEBUG nova.network.neutron [req-d45704b8-458f-4813-b872-36d508aef544 req-4a484bc0-bc75-4f1c-bb46-937a52ea0362 service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1400.741985] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquiring lock "06baedda-2926-4ec8-a4f6-d62713f48a26" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1400.742278] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lock "06baedda-2926-4ec8-a4f6-d62713f48a26" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1400.817911] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d29a9c1-299a-468b-8560-76ac3262c56f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.826970] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa018655-838d-410e-8c77-b3879e7f1419 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.858557] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c50ff5a-5947-4fd1-a2cc-e0601b31b138 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.866209] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de8be519-8376-409d-a9fb-f72cbedcfb81 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.880055] env[62508]: DEBUG nova.compute.provider_tree [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1401.028653] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquiring lock "45de6dd5-97f3-4eea-a171-0254a2b37a41" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.028893] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lock "45de6dd5-97f3-4eea-a171-0254a2b37a41" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.227416] env[62508]: DEBUG oslo_vmware.api [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': task-1775590, 'name': PowerOnVM_Task, 'duration_secs': 0.416158} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.227681] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1401.227889] env[62508]: INFO nova.compute.manager [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Took 8.54 seconds to spawn the instance on the hypervisor. [ 1401.228077] env[62508]: DEBUG nova.compute.manager [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1401.228872] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e815b0e6-486a-4ef1-b075-0a23990fa122 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.237738] env[62508]: DEBUG oslo_concurrency.lockutils [req-d45704b8-458f-4813-b872-36d508aef544 req-4a484bc0-bc75-4f1c-bb46-937a52ea0362 service nova] Releasing lock "refresh_cache-84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1401.238568] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquired lock "refresh_cache-84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1401.238568] env[62508]: DEBUG nova.network.neutron [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1401.402955] env[62508]: ERROR nova.scheduler.client.report [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c 
tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [req-a4554b10-efd3-46bf-acc5-b7bc1f8ad55b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a4554b10-efd3-46bf-acc5-b7bc1f8ad55b"}]} [ 1401.420555] env[62508]: DEBUG nova.scheduler.client.report [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1401.438219] env[62508]: DEBUG nova.scheduler.client.report [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1401.438455] env[62508]: DEBUG nova.compute.provider_tree [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1401.450031] env[62508]: DEBUG nova.scheduler.client.report [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1401.470183] env[62508]: DEBUG nova.scheduler.client.report [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: 
COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1401.748652] env[62508]: INFO nova.compute.manager [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Took 44.75 seconds to build instance. [ 1401.785880] env[62508]: DEBUG nova.network.neutron [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1401.842252] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba0afb87-a900-4974-8831-1911eb89c455 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.849780] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41158010-9054-426e-ae33-9b8e17496c40 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.882208] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef05ffd0-5cc9-4e9b-900b-25cd666d04c8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.890058] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb6f0b4-62ba-429a-bd59-4a86c526b9e5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.903592] env[62508]: DEBUG nova.compute.provider_tree [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1401.983503] env[62508]: DEBUG nova.network.neutron [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Updating instance_info_cache with network_info: [{"id": "a4c6f41a-0293-43f8-a413-d6181d46187c", "address": "fa:16:3e:fc:03:0e", "network": {"id": "97fee1f8-09a7-4a1a-bca0-16b26a3c0207", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2039645983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a34358e29dde46139ee4aa5c8f57d0d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4c6f41a-02", "ovs_interfaceid": "a4c6f41a-0293-43f8-a413-d6181d46187c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1402.043801] env[62508]: DEBUG nova.compute.manager [None req-f6c4f522-72ea-4c8d-8f5e-56ef06960b0d tempest-ServerDiagnosticsTest-424522467 tempest-ServerDiagnosticsTest-424522467-project-admin] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1402.044957] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1dff1f-9af5-486e-91c7-d5caaee22386 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.053546] env[62508]: INFO nova.compute.manager [None req-f6c4f522-72ea-4c8d-8f5e-56ef06960b0d tempest-ServerDiagnosticsTest-424522467 tempest-ServerDiagnosticsTest-424522467-project-admin] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Retrieving diagnostics [ 1402.054699] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40942917-4850-42aa-aeb1-c30414bc6a41 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.086888] env[62508]: DEBUG oslo_concurrency.lockutils [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquiring lock "ffe54977-81c4-4842-9773-eed704a53ada" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1402.087184] env[62508]: DEBUG oslo_concurrency.lockutils [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lock "ffe54977-81c4-4842-9773-eed704a53ada" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1402.087394] env[62508]: DEBUG oslo_concurrency.lockutils [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquiring lock "ffe54977-81c4-4842-9773-eed704a53ada-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1402.087597] env[62508]: DEBUG oslo_concurrency.lockutils [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lock 
"ffe54977-81c4-4842-9773-eed704a53ada-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1402.087748] env[62508]: DEBUG oslo_concurrency.lockutils [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lock "ffe54977-81c4-4842-9773-eed704a53ada-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.090856] env[62508]: INFO nova.compute.manager [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Terminating instance [ 1402.092586] env[62508]: DEBUG nova.compute.manager [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1402.092783] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1402.093577] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d44d1150-e51c-4294-9310-4e5927e92b7e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.100785] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1402.101047] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43236399-b7a8-4a3e-b2b6-fadd40d379d1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.232656] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1402.233405] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1402.233405] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Deleting the datastore file [datastore1] ffe54977-81c4-4842-9773-eed704a53ada {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1402.233405] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b0f2e2c-5059-4f62-a198-aeb25ff36a36 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.240254] env[62508]: DEBUG oslo_vmware.api [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for the task: (returnval){ [ 1402.240254] env[62508]: value = "task-1775592" [ 1402.240254] env[62508]: _type = "Task" [ 1402.240254] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.248704] env[62508]: DEBUG oslo_vmware.api [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775592, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.252239] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e23743ae-c322-4f13-8faf-faf1ab7ff372 tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Lock "18ee140a-97bd-439a-8027-0dd0a1f0a6e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.134s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.438224] env[62508]: DEBUG nova.scheduler.client.report [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 59 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1402.438491] env[62508]: DEBUG nova.compute.provider_tree [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 59 to 60 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1402.438720] env[62508]: DEBUG nova.compute.provider_tree [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1402.486084] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Releasing lock "refresh_cache-84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.486235] env[62508]: DEBUG nova.compute.manager [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Instance network_info: |[{"id": "a4c6f41a-0293-43f8-a413-d6181d46187c", "address": "fa:16:3e:fc:03:0e", "network": {"id": "97fee1f8-09a7-4a1a-bca0-16b26a3c0207", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2039645983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a34358e29dde46139ee4aa5c8f57d0d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4c6f41a-02", "ovs_interfaceid": "a4c6f41a-0293-43f8-a413-d6181d46187c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1402.486619] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:03:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db1f7867-8524-469c-ab47-d2c9e2751d98', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4c6f41a-0293-43f8-a413-d6181d46187c', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1402.494805] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Creating folder: Project (a34358e29dde46139ee4aa5c8f57d0d0). Parent ref: group-v368536. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1402.495480] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e37b5fb1-6da5-458c-b13c-897e536b185d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.507237] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Created folder: Project (a34358e29dde46139ee4aa5c8f57d0d0) in parent group-v368536. [ 1402.507422] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Creating folder: Instances. Parent ref: group-v368619. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1402.507651] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0a72401c-455f-4988-bb22-c759951dc1ab {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.516420] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Created folder: Instances in parent group-v368619. [ 1402.516647] env[62508]: DEBUG oslo.service.loopingcall [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1402.516825] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1402.517072] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f333677-79fe-43f0-8752-d1b002bfa48f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.535804] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1402.535804] env[62508]: value = "task-1775595" [ 1402.535804] env[62508]: _type = "Task" [ 1402.535804] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.543040] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775595, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.750272] env[62508]: DEBUG oslo_vmware.api [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775592, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182701} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.750600] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1402.750694] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1402.750807] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1402.750989] env[62508]: INFO nova.compute.manager [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Took 0.66 seconds to destroy the instance on the hypervisor. [ 1402.751278] env[62508]: DEBUG oslo.service.loopingcall [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1402.751491] env[62508]: DEBUG nova.compute.manager [-] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1402.751588] env[62508]: DEBUG nova.network.neutron [-] [instance: ffe54977-81c4-4842-9773-eed704a53ada] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1402.754793] env[62508]: DEBUG nova.compute.manager [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1402.943775] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.784s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.944409] env[62508]: DEBUG nova.compute.manager [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1402.949110] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 28.415s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.045929] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775595, 'name': CreateVM_Task, 'duration_secs': 0.340306} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.047415] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1403.048104] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1403.048274] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.048592] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1403.048892] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1682b5f8-80f1-46e9-9c7e-48830b80f8b5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.054293] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1403.054293] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523c3a6c-9c1b-2da2-3bb8-49eff8795644" [ 1403.054293] env[62508]: _type = "Task" [ 1403.054293] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.063968] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523c3a6c-9c1b-2da2-3bb8-49eff8795644, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.066014] env[62508]: DEBUG nova.compute.manager [req-5899186d-435d-4a0a-90db-70da5d4b5751 req-8cb69d6c-9330-4ac3-817c-dd8c6cdc872b service nova] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Received event network-vif-deleted-43717415-1abf-4418-8d1c-38df0c6f0ea9 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1403.066805] env[62508]: INFO nova.compute.manager [req-5899186d-435d-4a0a-90db-70da5d4b5751 req-8cb69d6c-9330-4ac3-817c-dd8c6cdc872b service nova] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Neutron deleted interface 43717415-1abf-4418-8d1c-38df0c6f0ea9; detaching it from the instance and deleting it from the info cache [ 1403.066805] env[62508]: DEBUG nova.network.neutron [req-5899186d-435d-4a0a-90db-70da5d4b5751 req-8cb69d6c-9330-4ac3-817c-dd8c6cdc872b service nova] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1403.203009] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Acquiring lock "18ee140a-97bd-439a-8027-0dd0a1f0a6e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.203296] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Lock "18ee140a-97bd-439a-8027-0dd0a1f0a6e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.203511] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Acquiring lock "18ee140a-97bd-439a-8027-0dd0a1f0a6e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.203693] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Lock "18ee140a-97bd-439a-8027-0dd0a1f0a6e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.203863] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Lock "18ee140a-97bd-439a-8027-0dd0a1f0a6e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1403.206041] env[62508]: INFO nova.compute.manager [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb 
tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Terminating instance [ 1403.207738] env[62508]: DEBUG nova.compute.manager [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1403.207881] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1403.208779] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba015711-9ba2-4d2e-a6ea-1d463dd62d96 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.216865] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1403.217113] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-749df566-05a9-47c6-8945-22e6f4f4741b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.224033] env[62508]: DEBUG oslo_vmware.api [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Waiting for the task: (returnval){ [ 1403.224033] env[62508]: value = "task-1775596" [ 1403.224033] env[62508]: _type = "Task" [ 1403.224033] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.231499] env[62508]: DEBUG oslo_vmware.api [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': task-1775596, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.277837] env[62508]: DEBUG oslo_concurrency.lockutils [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.453065] env[62508]: DEBUG nova.compute.utils [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1403.454379] env[62508]: DEBUG nova.compute.manager [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1403.454543] env[62508]: DEBUG nova.network.neutron [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1403.532884] env[62508]: DEBUG nova.policy [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2561183ef9c54615988c33906fc5f84e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce0dd059301e41abb3758625d38e435e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1403.551536] env[62508]: DEBUG nova.network.neutron [-] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1403.565660] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523c3a6c-9c1b-2da2-3bb8-49eff8795644, 'name': SearchDatastore_Task, 'duration_secs': 0.010171} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.566055] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1403.566181] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1403.566407] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1403.566550] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.566725] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1403.566969] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-21f3faa1-bafa-42f6-af96-8c5ed021e83f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.568917] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-66e5ea3c-970a-42bc-8a46-97808520be14 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.579496] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c569f5f4-94b1-4a39-9362-9435dadab4c2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.594734] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1403.594877] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1403.595931] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19dcd895-3cc6-4885-9a87-1228820937d5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.613649] env[62508]: DEBUG nova.compute.manager [req-5899186d-435d-4a0a-90db-70da5d4b5751 req-8cb69d6c-9330-4ac3-817c-dd8c6cdc872b service nova] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Detach interface failed, port_id=43717415-1abf-4418-8d1c-38df0c6f0ea9, reason: Instance ffe54977-81c4-4842-9773-eed704a53ada could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1403.615318] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1403.615318] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521ea406-739b-f2ce-7aa1-542865c6f444" [ 1403.615318] env[62508]: _type = "Task" [ 1403.615318] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.624163] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521ea406-739b-f2ce-7aa1-542865c6f444, 'name': SearchDatastore_Task, 'duration_secs': 0.008881} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.624337] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d59f209-6688-4a82-9606-5e3d00150863 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.629176] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1403.629176] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5201e14c-63ee-d030-ee16-36f252bda145" [ 1403.629176] env[62508]: _type = "Task" [ 1403.629176] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.636426] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5201e14c-63ee-d030-ee16-36f252bda145, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.733821] env[62508]: DEBUG oslo_vmware.api [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': task-1775596, 'name': PowerOffVM_Task, 'duration_secs': 0.176073} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.734610] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1403.734610] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1403.734610] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ec9a65d0-beaf-4232-9859-4d52f539734c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.797282] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1403.797586] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1403.797774] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Deleting the datastore file [datastore1] 18ee140a-97bd-439a-8027-0dd0a1f0a6e2 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1403.797933] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74a47225-7224-4ea7-8fbc-4b9512d32399 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.804161] env[62508]: DEBUG oslo_vmware.api [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Waiting for the task: (returnval){ [ 1403.804161] env[62508]: value = "task-1775598" [ 1403.804161] env[62508]: _type = "Task" [ 1403.804161] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.811573] env[62508]: DEBUG oslo_vmware.api [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': task-1775598, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.860244] env[62508]: DEBUG nova.network.neutron [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Successfully created port: d9e88907-9194-4d90-87ab-d1b87ef0e48e {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1403.967968] env[62508]: DEBUG nova.compute.manager [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1403.999426] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance ee99ff4d-9996-4cfa-b038-7b19aef27438 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1403.999591] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance e2d4c71b-1164-4c7d-9ffb-7f5489f92d32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1403.999796] env[62508]: WARNING nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 192995e7-82f5-41be-990d-d91b93f981e1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1403.999933] env[62508]: WARNING nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 03552483-a365-4d25-94bc-ea9b38ee6cd6 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1404.000064] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 7d23d8f0-d7a9-4236-ad28-208e77b72138 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.000184] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance b911f25d-711b-411e-bb2d-2e59386ff2ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.000304] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance de69dbf0-86f1-4b05-a9db-8b9afaabe49c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.000423] env[62508]: WARNING nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 42eb98a9-e341-4a17-9d76-2a2c37efc1a1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1404.000562] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Instance with task_state "deleting" is not being actively managed by this compute host but has allocations referencing this compute node (5d5b4923-a8ac-4688-9f86-2405bd3406a9): {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocations during the task state transition. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1780}} [ 1404.000681] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 73452964-d690-451d-98c3-fba3c3301c6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.000791] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance a226327d-11df-45e0-bef8-2337a0317c9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.000916] env[62508]: WARNING nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance b297d642-88a7-4acc-a94d-e1cb7df81982 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1404.001025] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 868cf942-f348-488d-b00a-af4c8b5efda5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.001148] env[62508]: WARNING nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 2f7b7109-0ced-4ea4-8dde-608655f2b3ab is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1404.001294] env[62508]: WARNING nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance aae3b4a3-c954-4f73-bd12-9b19a675179c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1404.001420] env[62508]: WARNING nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance ce74cbd8-b709-418b-a206-f51975fd0af1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1404.001540] env[62508]: WARNING nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance a239d78f-085a-4e5c-924d-cf338298fa73 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1404.001653] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance ffe54977-81c4-4842-9773-eed704a53ada actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.001765] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 18ee140a-97bd-439a-8027-0dd0a1f0a6e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.001875] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.002057] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 95a289ac-3178-45ea-80d2-905b9af54f3c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.054028] env[62508]: INFO nova.compute.manager [-] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Took 1.30 seconds to deallocate network for instance. [ 1404.139173] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5201e14c-63ee-d030-ee16-36f252bda145, 'name': SearchDatastore_Task, 'duration_secs': 0.032689} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.139426] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1404.139680] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad/84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1404.139928] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c7045926-5cb6-4d8e-abbb-6b3d000df730 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.146772] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1404.146772] env[62508]: value = "task-1775599" [ 1404.146772] env[62508]: _type = "Task" [ 1404.146772] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.154828] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775599, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.313532] env[62508]: DEBUG oslo_vmware.api [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Task: {'id': task-1775598, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.254295} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.313812] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1404.313999] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1404.314199] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1404.314385] env[62508]: INFO nova.compute.manager [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1404.314641] env[62508]: DEBUG oslo.service.loopingcall [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1404.314828] env[62508]: DEBUG nova.compute.manager [-] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1404.314923] env[62508]: DEBUG nova.network.neutron [-] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1404.505646] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1404.560145] env[62508]: DEBUG oslo_concurrency.lockutils [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1404.666368] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775599, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.864107] env[62508]: DEBUG nova.compute.manager [req-9f831a6b-2b8c-474c-8864-e73609a12973 req-640989c9-b6f4-4115-b364-6b569cbe0741 service nova] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Received event network-vif-deleted-61d5f183-d4da-47f0-814e-82621dbc6657 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1404.864336] env[62508]: INFO nova.compute.manager [req-9f831a6b-2b8c-474c-8864-e73609a12973 req-640989c9-b6f4-4115-b364-6b569cbe0741 service nova] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Neutron deleted interface 61d5f183-d4da-47f0-814e-82621dbc6657; detaching it from the instance and deleting it from the info cache [ 1404.864472] env[62508]: DEBUG nova.network.neutron [req-9f831a6b-2b8c-474c-8864-e73609a12973 req-640989c9-b6f4-4115-b364-6b569cbe0741 service nova] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.982249] env[62508]: DEBUG nova.compute.manager [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1405.010033] env[62508]: DEBUG nova.virt.hardware [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1405.010033] env[62508]: DEBUG nova.virt.hardware [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1405.010033] env[62508]: DEBUG nova.virt.hardware [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1405.010033] env[62508]: DEBUG nova.virt.hardware [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1405.010033] env[62508]: DEBUG 
nova.virt.hardware [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1405.010033] env[62508]: DEBUG nova.virt.hardware [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1405.010608] env[62508]: DEBUG nova.virt.hardware [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1405.010789] env[62508]: DEBUG nova.virt.hardware [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1405.010960] env[62508]: DEBUG nova.virt.hardware [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1405.011150] env[62508]: DEBUG nova.virt.hardware [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1405.011398] env[62508]: DEBUG nova.virt.hardware [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1405.012121] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance e478855d-e9c7-4abc-8e22-a4b2eb0c7310 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1405.013994] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44db505b-a4d6-4893-9b60-527f4e7f91d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.022774] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce006c15-3e51-4064-b75c-1c7a3fad8390 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.157735] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775599, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.617598} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.157917] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad/84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1405.158138] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1405.158409] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-de16b029-2771-43b2-8727-ebd71a8af189 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.165138] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1405.165138] env[62508]: value = "task-1775600" [ 1405.165138] env[62508]: _type = "Task" [ 1405.165138] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.175259] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775600, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.323750] env[62508]: DEBUG nova.network.neutron [-] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1405.367048] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c13e9372-6482-46d7-96ec-271bf29135bc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.377456] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83348930-17b1-47bf-a9b1-905d9a81cf13 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.407658] env[62508]: DEBUG nova.compute.manager [req-9f831a6b-2b8c-474c-8864-e73609a12973 req-640989c9-b6f4-4115-b364-6b569cbe0741 service nova] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Detach interface failed, port_id=61d5f183-d4da-47f0-814e-82621dbc6657, reason: Instance 18ee140a-97bd-439a-8027-0dd0a1f0a6e2 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1405.431231] env[62508]: DEBUG nova.compute.manager [req-76385a1c-6b6f-4fc8-aa5a-0341922fd451 req-67b8adec-53c3-4d31-801b-3d29e9e8f467 service nova] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Received event network-vif-plugged-d9e88907-9194-4d90-87ab-d1b87ef0e48e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1405.431443] env[62508]: DEBUG oslo_concurrency.lockutils [req-76385a1c-6b6f-4fc8-aa5a-0341922fd451 req-67b8adec-53c3-4d31-801b-3d29e9e8f467 service nova] Acquiring lock "95a289ac-3178-45ea-80d2-905b9af54f3c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.431708] env[62508]: DEBUG oslo_concurrency.lockutils [req-76385a1c-6b6f-4fc8-aa5a-0341922fd451 req-67b8adec-53c3-4d31-801b-3d29e9e8f467 service nova] Lock "95a289ac-3178-45ea-80d2-905b9af54f3c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.431823] env[62508]: DEBUG oslo_concurrency.lockutils [req-76385a1c-6b6f-4fc8-aa5a-0341922fd451 req-67b8adec-53c3-4d31-801b-3d29e9e8f467 service nova] Lock "95a289ac-3178-45ea-80d2-905b9af54f3c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1405.431960] env[62508]: DEBUG nova.compute.manager [req-76385a1c-6b6f-4fc8-aa5a-0341922fd451 req-67b8adec-53c3-4d31-801b-3d29e9e8f467 service nova] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] No waiting events found dispatching network-vif-plugged-d9e88907-9194-4d90-87ab-d1b87ef0e48e {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1405.432168] env[62508]: WARNING nova.compute.manager [req-76385a1c-6b6f-4fc8-aa5a-0341922fd451 req-67b8adec-53c3-4d31-801b-3d29e9e8f467 service nova] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Received unexpected event 
network-vif-plugged-d9e88907-9194-4d90-87ab-d1b87ef0e48e for instance with vm_state building and task_state spawning. [ 1405.516724] env[62508]: DEBUG nova.network.neutron [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Successfully updated port: d9e88907-9194-4d90-87ab-d1b87ef0e48e {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1405.521694] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1405.675249] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775600, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062521} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.675537] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1405.676355] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aacf0a1-6edd-43eb-9ee5-1f8335a909e9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.699177] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad/84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1405.699390] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-071dfcb5-7caf-4f0d-834f-f68f24f1cc02 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.719509] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1405.719509] env[62508]: value = "task-1775601" [ 1405.719509] env[62508]: _type = "Task" [ 1405.719509] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.727906] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775601, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.826539] env[62508]: INFO nova.compute.manager [-] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Took 1.51 seconds to deallocate network for instance. [ 1406.018274] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1406.018668] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1406.018668] env[62508]: DEBUG nova.network.neutron [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1406.021032] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance a10a4217-ae46-4f00-9ba1-cdf74f44ec7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1406.230545] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775601, 'name': ReconfigVM_Task, 'duration_secs': 0.295653} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.230785] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad/84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1406.231504] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d4f9d7f-2e94-4cec-ab3b-9acac392a976 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.238210] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1406.238210] env[62508]: value = "task-1775602" [ 1406.238210] env[62508]: _type = "Task" [ 1406.238210] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.246501] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775602, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.333960] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1406.525552] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance db74146d-abc3-4d48-be1b-6ad471794dbf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1406.556407] env[62508]: DEBUG nova.network.neutron [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1406.690814] env[62508]: DEBUG nova.network.neutron [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating instance_info_cache with network_info: [{"id": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "address": "fa:16:3e:f8:bf:1b", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9e88907-91", "ovs_interfaceid": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.748299] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775602, 'name': Rename_Task, 'duration_secs': 0.129841} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.748558] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1406.748792] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0f39f8c9-08a6-4d63-83e2-77bb36287773 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.754511] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1406.754511] env[62508]: value = "task-1775603" [ 1406.754511] env[62508]: _type = "Task" [ 1406.754511] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.763845] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775603, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.029392] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 70c8de27-4696-4005-bbec-e7a33e56311b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1407.198682] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1407.199034] env[62508]: DEBUG nova.compute.manager [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Instance network_info: |[{"id": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "address": "fa:16:3e:f8:bf:1b", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9e88907-91", "ovs_interfaceid": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1407.199501] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:bf:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8140829-5eac-40d8-a10c-eb881f57affc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd9e88907-9194-4d90-87ab-d1b87ef0e48e', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1407.207720] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Creating folder: Project (ce0dd059301e41abb3758625d38e435e). Parent ref: group-v368536. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1407.208024] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-30b54501-91bc-499a-8368-3fdf32c8594d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.220593] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Created folder: Project (ce0dd059301e41abb3758625d38e435e) in parent group-v368536. [ 1407.220782] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Creating folder: Instances. Parent ref: group-v368622. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1407.221017] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51ea965e-ceb0-4868-80c2-42d9649ff760 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.229897] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Created folder: Instances in parent group-v368622. [ 1407.230138] env[62508]: DEBUG oslo.service.loopingcall [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1407.230322] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1407.230618] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a76af4b5-ea83-4c06-9d16-27460d209cf5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.250725] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1407.250725] env[62508]: value = "task-1775606" [ 1407.250725] env[62508]: _type = "Task" [ 1407.250725] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.260647] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775606, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.265325] env[62508]: DEBUG oslo_vmware.api [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775603, 'name': PowerOnVM_Task, 'duration_secs': 0.448329} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.265586] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1407.265790] env[62508]: INFO nova.compute.manager [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Took 8.07 seconds to spawn the instance on the hypervisor. [ 1407.265981] env[62508]: DEBUG nova.compute.manager [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1407.266724] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aec6b81-8dfa-4e03-bd86-c9e6eec6d38d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.468346] env[62508]: DEBUG nova.compute.manager [req-40c05b9e-e266-4d4c-a683-b6d3cf3fb9ee req-0b4e461f-7e91-445d-94c1-666ea20da822 service nova] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Received event network-changed-d9e88907-9194-4d90-87ab-d1b87ef0e48e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1407.468550] env[62508]: DEBUG nova.compute.manager [req-40c05b9e-e266-4d4c-a683-b6d3cf3fb9ee req-0b4e461f-7e91-445d-94c1-666ea20da822 service nova] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Refreshing instance network info cache due to event network-changed-d9e88907-9194-4d90-87ab-d1b87ef0e48e. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1407.468764] env[62508]: DEBUG oslo_concurrency.lockutils [req-40c05b9e-e266-4d4c-a683-b6d3cf3fb9ee req-0b4e461f-7e91-445d-94c1-666ea20da822 service nova] Acquiring lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1407.468908] env[62508]: DEBUG oslo_concurrency.lockutils [req-40c05b9e-e266-4d4c-a683-b6d3cf3fb9ee req-0b4e461f-7e91-445d-94c1-666ea20da822 service nova] Acquired lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.469091] env[62508]: DEBUG nova.network.neutron [req-40c05b9e-e266-4d4c-a683-b6d3cf3fb9ee req-0b4e461f-7e91-445d-94c1-666ea20da822 service nova] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Refreshing network info cache for port d9e88907-9194-4d90-87ab-d1b87ef0e48e {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1407.532739] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1407.760467] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775606, 'name': CreateVM_Task, 'duration_secs': 0.32525} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.760651] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1407.761391] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1407.761567] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.761879] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1407.762145] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e051299c-049b-4bc9-977e-bcc4a94315ea {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.766724] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1407.766724] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d59def-f571-2bb8-e9a8-1443f3381d2a" [ 1407.766724] env[62508]: _type = "Task" [ 1407.766724] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.774176] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d59def-f571-2bb8-e9a8-1443f3381d2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.785679] env[62508]: INFO nova.compute.manager [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Took 39.74 seconds to build instance. 
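The CreateVM_Task and SearchDatastore_Task entries above all follow the same shape: the driver invokes a vCenter method that returns a task handle, then wait_for_task polls the task state, logging "progress is N%" until it reports success. A minimal, self-contained sketch of that poll-until-complete loop follows; FakeTask, its state names, and the poll interval are illustrative stand-ins, not the oslo.vmware implementation.

import time

class FakeTask:
    """Illustrative stand-in for a vCenter task handle (not the real API)."""
    def __init__(self, polls_until_done=3):
        self._polls = 0
        self._total = polls_until_done

    def info(self):
        self._polls += 1
        if self._polls >= self._total:
            return {"state": "success", "progress": 100}
        return {"state": "running",
                "progress": int(100 * self._polls / self._total)}

def wait_for_task(task, poll_interval=0.5):
    """Poll until the task reports success, mirroring the 'progress is N%' lines."""
    while True:
        info = task.info()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError("vCenter task failed")
        print("progress is %d%%" % info["progress"])
        time.sleep(poll_interval)

if __name__ == "__main__":
    print("completed successfully:", wait_for_task(FakeTask()))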
[ 1408.035127] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 38d294a9-2f51-438d-b942-a88e380a981f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1408.223129] env[62508]: DEBUG nova.network.neutron [req-40c05b9e-e266-4d4c-a683-b6d3cf3fb9ee req-0b4e461f-7e91-445d-94c1-666ea20da822 service nova] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updated VIF entry in instance network info cache for port d9e88907-9194-4d90-87ab-d1b87ef0e48e. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1408.223491] env[62508]: DEBUG nova.network.neutron [req-40c05b9e-e266-4d4c-a683-b6d3cf3fb9ee req-0b4e461f-7e91-445d-94c1-666ea20da822 service nova] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating instance_info_cache with network_info: [{"id": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "address": "fa:16:3e:f8:bf:1b", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9e88907-91", "ovs_interfaceid": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1408.277435] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d59def-f571-2bb8-e9a8-1443f3381d2a, 'name': SearchDatastore_Task, 'duration_secs': 0.013156} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.277743] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1408.277982] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1408.278228] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1408.278377] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1408.278554] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1408.278805] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6e7434c-522e-42d4-bc4d-e1e3781574dc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.287194] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ac6f1ed-42df-486d-be6a-5290f6524525 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.021s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1408.290177] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1408.290177] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1408.290294] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9fc05dd-660e-4b63-aef0-eddd205f6323 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.298038] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1408.298038] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f0d882-e24c-5b94-1f05-846618d675ec" [ 1408.298038] env[62508]: _type = "Task" [ 1408.298038] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.307248] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f0d882-e24c-5b94-1f05-846618d675ec, 'name': SearchDatastore_Task, 'duration_secs': 0.008179} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.307983] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-101dbd56-362d-4308-b6f9-40577de20ca1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.313507] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1408.313507] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ca285d-8d68-9264-e7e2-bb4934187c26" [ 1408.313507] env[62508]: _type = "Task" [ 1408.313507] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.321649] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ca285d-8d68-9264-e7e2-bb4934187c26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.466906] env[62508]: DEBUG nova.compute.manager [req-297fedb5-bdd5-4fd7-8c48-d5543fd0e7f6 req-18ddc760-2788-479d-9714-e3f84ad18200 service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Received event network-changed-a4c6f41a-0293-43f8-a413-d6181d46187c {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1408.467137] env[62508]: DEBUG nova.compute.manager [req-297fedb5-bdd5-4fd7-8c48-d5543fd0e7f6 req-18ddc760-2788-479d-9714-e3f84ad18200 service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Refreshing instance network info cache due to event network-changed-a4c6f41a-0293-43f8-a413-d6181d46187c. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1408.467342] env[62508]: DEBUG oslo_concurrency.lockutils [req-297fedb5-bdd5-4fd7-8c48-d5543fd0e7f6 req-18ddc760-2788-479d-9714-e3f84ad18200 service nova] Acquiring lock "refresh_cache-84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1408.467460] env[62508]: DEBUG oslo_concurrency.lockutils [req-297fedb5-bdd5-4fd7-8c48-d5543fd0e7f6 req-18ddc760-2788-479d-9714-e3f84ad18200 service nova] Acquired lock "refresh_cache-84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1408.467644] env[62508]: DEBUG nova.network.neutron [req-297fedb5-bdd5-4fd7-8c48-d5543fd0e7f6 req-18ddc760-2788-479d-9714-e3f84ad18200 service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Refreshing network info cache for port a4c6f41a-0293-43f8-a413-d6181d46187c {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1408.538123] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 63fca45d-5922-4a14-9936-30070c349f8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1408.726711] env[62508]: DEBUG oslo_concurrency.lockutils [req-40c05b9e-e266-4d4c-a683-b6d3cf3fb9ee req-0b4e461f-7e91-445d-94c1-666ea20da822 service nova] Releasing lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1408.794320] env[62508]: DEBUG nova.compute.manager [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1408.824403] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ca285d-8d68-9264-e7e2-bb4934187c26, 'name': SearchDatastore_Task, 'duration_secs': 0.008644} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.824689] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1408.824943] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 95a289ac-3178-45ea-80d2-905b9af54f3c/95a289ac-3178-45ea-80d2-905b9af54f3c.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1408.825223] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6415680e-ae90-4418-80b5-4cc2744c1b81 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.832077] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1408.832077] env[62508]: value = "task-1775607" [ 1408.832077] env[62508]: _type = "Task" [ 1408.832077] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.840352] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1775607, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.041197] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 9a3ef326-0fbf-4fd2-bb5e-3009bf661381 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1409.301534] env[62508]: DEBUG nova.network.neutron [req-297fedb5-bdd5-4fd7-8c48-d5543fd0e7f6 req-18ddc760-2788-479d-9714-e3f84ad18200 service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Updated VIF entry in instance network info cache for port a4c6f41a-0293-43f8-a413-d6181d46187c. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1409.301897] env[62508]: DEBUG nova.network.neutron [req-297fedb5-bdd5-4fd7-8c48-d5543fd0e7f6 req-18ddc760-2788-479d-9714-e3f84ad18200 service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Updating instance_info_cache with network_info: [{"id": "a4c6f41a-0293-43f8-a413-d6181d46187c", "address": "fa:16:3e:fc:03:0e", "network": {"id": "97fee1f8-09a7-4a1a-bca0-16b26a3c0207", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2039645983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a34358e29dde46139ee4aa5c8f57d0d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4c6f41a-02", "ovs_interfaceid": "a4c6f41a-0293-43f8-a413-d6181d46187c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1409.320713] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.342172] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1775607, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488545} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.342490] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 95a289ac-3178-45ea-80d2-905b9af54f3c/95a289ac-3178-45ea-80d2-905b9af54f3c.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1409.342704] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1409.342950] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d40265b5-3af6-44ca-889a-79c5eeb51585 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.349838] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1409.349838] env[62508]: value = "task-1775608" [ 1409.349838] env[62508]: _type = "Task" [ 1409.349838] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.357903] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1775608, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.544232] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1409.807364] env[62508]: DEBUG oslo_concurrency.lockutils [req-297fedb5-bdd5-4fd7-8c48-d5543fd0e7f6 req-18ddc760-2788-479d-9714-e3f84ad18200 service nova] Releasing lock "refresh_cache-84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1409.859274] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1775608, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069349} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.859578] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1409.860393] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae576639-f552-4047-b654-ff3505bd27fd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.884271] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 95a289ac-3178-45ea-80d2-905b9af54f3c/95a289ac-3178-45ea-80d2-905b9af54f3c.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1409.884271] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0538b9eb-56db-409b-bb05-a5e34e7a0608 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.904412] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1409.904412] env[62508]: value = "task-1775609" [ 1409.904412] env[62508]: _type = "Task" [ 1409.904412] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.912974] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1775609, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.049018] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 06baedda-2926-4ec8-a4f6-d62713f48a26 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.414879] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1775609, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.553457] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 45de6dd5-97f3-4eea-a171-0254a2b37a41 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.553457] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1410.553457] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2880MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1410.587101] env[62508]: DEBUG nova.compute.manager [req-16c669ea-fa41-4874-aca1-90d99e6c546f req-501e3b5e-4c3c-45a4-9f60-e06de046da8c service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Received event network-changed-a4c6f41a-0293-43f8-a413-d6181d46187c {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1410.587358] env[62508]: DEBUG nova.compute.manager [req-16c669ea-fa41-4874-aca1-90d99e6c546f req-501e3b5e-4c3c-45a4-9f60-e06de046da8c service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Refreshing instance network info cache due to event network-changed-a4c6f41a-0293-43f8-a413-d6181d46187c. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1410.587579] env[62508]: DEBUG oslo_concurrency.lockutils [req-16c669ea-fa41-4874-aca1-90d99e6c546f req-501e3b5e-4c3c-45a4-9f60-e06de046da8c service nova] Acquiring lock "refresh_cache-84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1410.587722] env[62508]: DEBUG oslo_concurrency.lockutils [req-16c669ea-fa41-4874-aca1-90d99e6c546f req-501e3b5e-4c3c-45a4-9f60-e06de046da8c service nova] Acquired lock "refresh_cache-84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.587882] env[62508]: DEBUG nova.network.neutron [req-16c669ea-fa41-4874-aca1-90d99e6c546f req-501e3b5e-4c3c-45a4-9f60-e06de046da8c service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Refreshing network info cache for port a4c6f41a-0293-43f8-a413-d6181d46187c {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1410.914131] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1775609, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.993459] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8ed046-7912-4e8a-8bbb-1608923f5337 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.000715] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b29f32a-8870-49d4-8879-8ef6cc793aa1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.032486] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a6bb98-ad26-45b4-8714-79d3ed3a1a14 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.039879] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf0b314-39ae-499a-b5c4-2495e4cb034d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.052815] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1411.328033] env[62508]: DEBUG nova.network.neutron [req-16c669ea-fa41-4874-aca1-90d99e6c546f req-501e3b5e-4c3c-45a4-9f60-e06de046da8c service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Updated VIF entry in instance network info cache for port a4c6f41a-0293-43f8-a413-d6181d46187c. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1411.328419] env[62508]: DEBUG nova.network.neutron [req-16c669ea-fa41-4874-aca1-90d99e6c546f req-501e3b5e-4c3c-45a4-9f60-e06de046da8c service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Updating instance_info_cache with network_info: [{"id": "a4c6f41a-0293-43f8-a413-d6181d46187c", "address": "fa:16:3e:fc:03:0e", "network": {"id": "97fee1f8-09a7-4a1a-bca0-16b26a3c0207", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2039645983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a34358e29dde46139ee4aa5c8f57d0d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4c6f41a-02", "ovs_interfaceid": "a4c6f41a-0293-43f8-a413-d6181d46187c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1411.334505] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquiring lock "879f1e09-8b21-4f89-bc00-04e3d6710662" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1411.334769] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "879f1e09-8b21-4f89-bc00-04e3d6710662" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1411.415245] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1775609, 'name': ReconfigVM_Task, 'duration_secs': 1.116316} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.415525] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 95a289ac-3178-45ea-80d2-905b9af54f3c/95a289ac-3178-45ea-80d2-905b9af54f3c.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1411.416136] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6c6c4141-5be4-4a22-b207-bb7dea2ddcab {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.422522] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1411.422522] env[62508]: value = "task-1775610" [ 1411.422522] env[62508]: _type = "Task" [ 1411.422522] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.430577] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1775610, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.573455] env[62508]: ERROR nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [req-8b448614-eab8-4dc6-b7a1-1b9678e0fbf7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8b448614-eab8-4dc6-b7a1-1b9678e0fbf7"}]} [ 1411.589231] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1411.602680] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1411.602923] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1411.614062] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1411.631563] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1411.831303] env[62508]: DEBUG oslo_concurrency.lockutils [req-16c669ea-fa41-4874-aca1-90d99e6c546f req-501e3b5e-4c3c-45a4-9f60-e06de046da8c service nova] Releasing lock "refresh_cache-84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1411.931478] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1775610, 'name': Rename_Task, 'duration_secs': 0.128911} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.933846] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1411.934830] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cd010f74-9156-4cc9-835e-45ac38238fd5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.940680] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1411.940680] env[62508]: value = "task-1775611" [ 1411.940680] env[62508]: _type = "Task" [ 1411.940680] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.957905] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1775611, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.036340] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771589e2-6493-47a7-8416-4921c628c6cf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.044339] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f25b3ce-6f7a-4185-a370-7cb750d1a1b4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.076071] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8577187f-cad6-47a9-a26d-09ca8cefbd7f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.083860] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f45a821-0cd1-49e5-b9e1-ebc994ffcb10 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.098438] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1412.451039] env[62508]: DEBUG oslo_vmware.api [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1775611, 'name': PowerOnVM_Task, 
'duration_secs': 0.48804} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.451329] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1412.451546] env[62508]: INFO nova.compute.manager [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Took 7.47 seconds to spawn the instance on the hypervisor. [ 1412.451734] env[62508]: DEBUG nova.compute.manager [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1412.452533] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84361f4d-967a-43b3-a4d6-150f1fc645a0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.633026] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 61 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1412.633026] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 61 to 62 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1412.633026] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1412.969823] env[62508]: INFO nova.compute.manager [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Took 41.11 seconds to build instance. 
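The inventory-update entries above show Placement's optimistic concurrency at work: the PUT carries the resource provider generation, a 409 placement.concurrent_update means another writer bumped it in the meantime, and the report client refreshes the inventory and retries until the write lands (the generation 61 to 62 transition logged above). A sketch of that refresh-and-retry flow against the Placement HTTP API follows; the endpoint URL, token, and requests-based client are assumptions for illustration, while the payload shape matches the inventories logged above.

import requests

PLACEMENT = "http://placement.example:8778"   # assumed endpoint, not from the log
HEADERS = {
    "X-Auth-Token": "ADMIN_TOKEN",            # assumed credentials
    "OpenStack-API-Version": "placement 1.26",
}

def set_inventories(rp_uuid, inventories, max_retries=3):
    """PUT inventories against the current provider generation; on a 409
    placement.concurrent_update, refresh the generation and retry."""
    url = "%s/resource_providers/%s/inventories" % (PLACEMENT, rp_uuid)
    for _ in range(max_retries):
        current = requests.get(url, headers=HEADERS).json()
        body = {
            "resource_provider_generation": current["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = requests.put(url, json=body, headers=HEADERS)
        if resp.status_code == 200:
            return resp.json()          # includes the new provider generation
        if resp.status_code == 409:
            continue                    # another writer bumped the generation
        resp.raise_for_status()
    raise RuntimeError("inventory update kept conflicting; giving up")

Called with the VCPU/MEMORY_MB/DISK_GB inventory dict seen in the log, a successful write bumps the provider generation, which is exactly the update_inventory / _update_generation sequence recorded above.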
[ 1413.135917] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1413.135917] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 10.187s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.136253] env[62508]: DEBUG oslo_concurrency.lockutils [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.999s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.137914] env[62508]: DEBUG oslo_concurrency.lockutils [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.138738] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.992s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.140151] env[62508]: INFO nova.compute.claims [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1413.142914] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.143872] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Cleaning up deleted instances {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1413.167782] env[62508]: INFO nova.scheduler.client.report [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Deleted allocations for instance 42eb98a9-e341-4a17-9d76-2a2c37efc1a1 [ 1413.474064] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6da0693e-2582-4214-b3d6-4d8d9f58a11c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "95a289ac-3178-45ea-80d2-905b9af54f3c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.913s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.652437] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] There are 11 instances to clean {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1413.652437] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: ad37d6a1-9053-4be6-adb2-0c62ac8ae9bd] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1413.676450] env[62508]: DEBUG oslo_concurrency.lockutils [None req-57413f0a-3169-4f67-a0be-df27b03282d4 tempest-ServerRescueTestJSONUnderV235-163328728 tempest-ServerRescueTestJSONUnderV235-163328728-project-member] Lock "42eb98a9-e341-4a17-9d76-2a2c37efc1a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.405s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.872305] env[62508]: DEBUG nova.compute.manager [req-0de7d149-8b93-4676-9d2f-03da66b0030f req-3a5766ae-c1b6-4f1c-8bb2-8868e57278c5 service nova] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Received event network-changed-d9e88907-9194-4d90-87ab-d1b87ef0e48e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1413.872305] env[62508]: DEBUG nova.compute.manager [req-0de7d149-8b93-4676-9d2f-03da66b0030f req-3a5766ae-c1b6-4f1c-8bb2-8868e57278c5 service nova] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Refreshing instance network info cache due to event network-changed-d9e88907-9194-4d90-87ab-d1b87ef0e48e. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1413.872305] env[62508]: DEBUG oslo_concurrency.lockutils [req-0de7d149-8b93-4676-9d2f-03da66b0030f req-3a5766ae-c1b6-4f1c-8bb2-8868e57278c5 service nova] Acquiring lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1413.872305] env[62508]: DEBUG oslo_concurrency.lockutils [req-0de7d149-8b93-4676-9d2f-03da66b0030f req-3a5766ae-c1b6-4f1c-8bb2-8868e57278c5 service nova] Acquired lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.872305] env[62508]: DEBUG nova.network.neutron [req-0de7d149-8b93-4676-9d2f-03da66b0030f req-3a5766ae-c1b6-4f1c-8bb2-8868e57278c5 service nova] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Refreshing network info cache for port d9e88907-9194-4d90-87ab-d1b87ef0e48e {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1413.979499] env[62508]: DEBUG nova.compute.manager [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1414.155655] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 575ea3dc-850d-4078-8678-41b3c40a4c27] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1414.504569] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.671117] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 46a524e2-93b0-4726-812f-98e08b6ba0b4] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1414.699280] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61a82e1-dd37-471e-82d1-0d8c57180d2c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.707825] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1582b53-2990-45d1-a4ae-f6f13a2cbe5a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.740997] env[62508]: DEBUG nova.network.neutron [req-0de7d149-8b93-4676-9d2f-03da66b0030f req-3a5766ae-c1b6-4f1c-8bb2-8868e57278c5 service nova] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updated VIF entry in instance network info cache for port d9e88907-9194-4d90-87ab-d1b87ef0e48e. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1414.741146] env[62508]: DEBUG nova.network.neutron [req-0de7d149-8b93-4676-9d2f-03da66b0030f req-3a5766ae-c1b6-4f1c-8bb2-8868e57278c5 service nova] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating instance_info_cache with network_info: [{"id": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "address": "fa:16:3e:f8:bf:1b", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9e88907-91", "ovs_interfaceid": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1414.746019] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08acecee-84d5-4d0b-8f6b-6cbb7cd6ec0d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.751788] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29dd73b-ea05-4b80-be2b-117745ee7096 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.770954] env[62508]: DEBUG nova.compute.provider_tree [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1415.175367] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: fa00f4fe-3bb2-4e17-be22-8a1fda502f65] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1415.244484] env[62508]: DEBUG oslo_concurrency.lockutils [req-0de7d149-8b93-4676-9d2f-03da66b0030f req-3a5766ae-c1b6-4f1c-8bb2-8868e57278c5 service nova] Releasing lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1415.271340] env[62508]: DEBUG nova.scheduler.client.report [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1415.678743] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 7339c22a-05c9-4ddd-93df-0326cbe96ca4] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1415.776550] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.638s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1415.777080] env[62508]: DEBUG nova.compute.manager [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1415.782089] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.436s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.783576] env[62508]: INFO nova.compute.claims [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1416.181790] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 891fac54-2ec4-4d47-8535-a33bd9dfb804] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1416.288690] env[62508]: DEBUG nova.compute.utils [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1416.292939] env[62508]: DEBUG nova.compute.manager [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1416.292939] env[62508]: DEBUG nova.network.neutron [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1416.342125] env[62508]: DEBUG nova.policy [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1528de9161a4a48a75ab042aa7e0777', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be4d5bf76d544f878ee84f7202fba4ab', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1416.618104] env[62508]: DEBUG nova.network.neutron [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Successfully created port: 5f1e6b41-10f8-488b-93cf-7cb619b5e80d {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1416.685707] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: d32a3a5d-17d0-4a79-b76a-371cdd170ee0] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1416.793484] env[62508]: DEBUG nova.compute.manager [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1417.192406] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 827b0887-2132-49af-bcce-cedc7237245d] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1417.250962] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66548f91-0422-439c-885b-bb95ef5e3d5e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.262657] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b16942b4-b228-4be8-aaf6-f67934f18845 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.296900] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18dd2964-3c0d-4e8f-bbe9-1746774065dd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.308850] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ecc333-a336-4e2a-90fa-da43fbb8a2b2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.324220] env[62508]: DEBUG nova.compute.provider_tree [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1417.696482] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: b182d3aa-a4de-4879-ab36-2cb51472158a] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1417.805987] env[62508]: DEBUG nova.compute.manager [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1417.829904] env[62508]: DEBUG nova.virt.hardware [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1417.830164] env[62508]: DEBUG nova.virt.hardware [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1417.830321] env[62508]: DEBUG nova.virt.hardware [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1417.830504] env[62508]: DEBUG nova.virt.hardware [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1417.830651] env[62508]: DEBUG nova.virt.hardware [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1417.830798] env[62508]: DEBUG nova.virt.hardware [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1417.830997] env[62508]: DEBUG nova.virt.hardware [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1417.831174] env[62508]: DEBUG nova.virt.hardware [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1417.831338] env[62508]: DEBUG 
nova.virt.hardware [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1417.831555] env[62508]: DEBUG nova.virt.hardware [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1417.831821] env[62508]: DEBUG nova.virt.hardware [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1417.833382] env[62508]: DEBUG nova.scheduler.client.report [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1417.836990] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c3c4e8-479a-44d9-b901-6e99c6c04cf7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.845685] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d470513f-8a1b-40cb-9691-31dc7fe9b06b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.099825] env[62508]: DEBUG nova.compute.manager [req-37640370-9e55-4f61-a989-57272cf15fff req-63047079-fcf6-46bd-93f7-44ff9577ab05 service nova] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Received event network-vif-plugged-5f1e6b41-10f8-488b-93cf-7cb619b5e80d {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1418.099984] env[62508]: DEBUG oslo_concurrency.lockutils [req-37640370-9e55-4f61-a989-57272cf15fff req-63047079-fcf6-46bd-93f7-44ff9577ab05 service nova] Acquiring lock "fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.100197] env[62508]: DEBUG oslo_concurrency.lockutils [req-37640370-9e55-4f61-a989-57272cf15fff req-63047079-fcf6-46bd-93f7-44ff9577ab05 service nova] Lock "fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.100355] env[62508]: DEBUG oslo_concurrency.lockutils 
[req-37640370-9e55-4f61-a989-57272cf15fff req-63047079-fcf6-46bd-93f7-44ff9577ab05 service nova] Lock "fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.100525] env[62508]: DEBUG nova.compute.manager [req-37640370-9e55-4f61-a989-57272cf15fff req-63047079-fcf6-46bd-93f7-44ff9577ab05 service nova] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] No waiting events found dispatching network-vif-plugged-5f1e6b41-10f8-488b-93cf-7cb619b5e80d {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1418.100690] env[62508]: WARNING nova.compute.manager [req-37640370-9e55-4f61-a989-57272cf15fff req-63047079-fcf6-46bd-93f7-44ff9577ab05 service nova] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Received unexpected event network-vif-plugged-5f1e6b41-10f8-488b-93cf-7cb619b5e80d for instance with vm_state building and task_state spawning. [ 1418.199368] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 96aff7ef-cfc3-46ef-ba3e-b4bc1141a88b] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1418.206876] env[62508]: DEBUG nova.network.neutron [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Successfully updated port: 5f1e6b41-10f8-488b-93cf-7cb619b5e80d {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1418.342060] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.559s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.342060] env[62508]: DEBUG nova.compute.manager [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1418.344455] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.996s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.344678] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.346691] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.485s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.346857] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.348482] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.456s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.350155] env[62508]: INFO nova.compute.claims [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1418.390603] env[62508]: INFO nova.scheduler.client.report [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Deleted allocations for instance b297d642-88a7-4acc-a94d-e1cb7df81982 [ 1418.392292] env[62508]: INFO nova.scheduler.client.report [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Deleted allocations for instance e652e59f-9432-41cf-b4a5-0f5cf649b24e [ 1418.705035] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: fb7519c4-0254-4831-81f3-0eed14844f2d] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1418.711973] env[62508]: DEBUG 
oslo_concurrency.lockutils [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Acquiring lock "refresh_cache-fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1418.712187] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Acquired lock "refresh_cache-fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.712340] env[62508]: DEBUG nova.network.neutron [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1418.855969] env[62508]: DEBUG nova.compute.utils [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1418.857989] env[62508]: DEBUG nova.compute.manager [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1418.858335] env[62508]: DEBUG nova.network.neutron [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1418.908169] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ab428bcf-5e90-48b7-9ffc-b794e214ee67 tempest-ServersNegativeTestMultiTenantJSON-502871563 tempest-ServersNegativeTestMultiTenantJSON-502871563-project-member] Lock "b297d642-88a7-4acc-a94d-e1cb7df81982" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.098s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.922840] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72d5cbe0-0847-4645-8e5c-6663f1e6b480 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "e652e59f-9432-41cf-b4a5-0f5cf649b24e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.226s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.923993] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "e652e59f-9432-41cf-b4a5-0f5cf649b24e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 36.764s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.924958] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquiring lock "e652e59f-9432-41cf-b4a5-0f5cf649b24e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.925355] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "e652e59f-9432-41cf-b4a5-0f5cf649b24e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.928019] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "e652e59f-9432-41cf-b4a5-0f5cf649b24e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.002s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.931017] env[62508]: INFO nova.compute.manager [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Terminating instance [ 1418.932767] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquiring lock "refresh_cache-e652e59f-9432-41cf-b4a5-0f5cf649b24e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1418.932917] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquired lock "refresh_cache-e652e59f-9432-41cf-b4a5-0f5cf649b24e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.933093] env[62508]: DEBUG nova.network.neutron [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1418.952597] env[62508]: DEBUG nova.policy [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '712ef76e285f48e6b5e8f75aa2fee850', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce113e91e2b74136a8050ed3acf3557c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 
'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1419.213569] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1419.213831] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Cleaning up deleted instances with incomplete migration {{(pid=62508) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1419.254462] env[62508]: DEBUG nova.network.neutron [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1419.363436] env[62508]: DEBUG nova.compute.manager [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1419.434959] env[62508]: DEBUG nova.compute.utils [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Can not refresh info_cache because instance was not found {{(pid=62508) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 1419.459421] env[62508]: DEBUG nova.network.neutron [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1419.604597] env[62508]: DEBUG nova.network.neutron [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Updating instance_info_cache with network_info: [{"id": "5f1e6b41-10f8-488b-93cf-7cb619b5e80d", "address": "fa:16:3e:97:66:81", "network": {"id": "50e473c5-0765-4fbb-9942-a8c772e62a71", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-127017859-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be4d5bf76d544f878ee84f7202fba4ab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f1e6b41-10", "ovs_interfaceid": "5f1e6b41-10f8-488b-93cf-7cb619b5e80d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1419.659385] env[62508]: DEBUG nova.network.neutron [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Successfully created port: ec6adf65-fbdf-4276-8e19-eb416336bbff {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1419.714956] env[62508]: DEBUG nova.network.neutron [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1419.717228] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1419.826546] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c0ad486-b076-4cbb-8904-dc0880bf5563 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.836366] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc7698ec-e00f-4254-a325-99b3550b9451 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.875412] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd0ce35-d629-4b15-80e4-f76937130711 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.883746] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf07bacf-6dfc-408d-9514-a1b34d4cc6f9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.898547] env[62508]: DEBUG nova.compute.provider_tree [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1420.107308] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Releasing lock "refresh_cache-fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1420.107758] env[62508]: DEBUG nova.compute.manager [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Instance network_info: |[{"id": "5f1e6b41-10f8-488b-93cf-7cb619b5e80d", "address": "fa:16:3e:97:66:81", "network": {"id": "50e473c5-0765-4fbb-9942-a8c772e62a71", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-127017859-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be4d5bf76d544f878ee84f7202fba4ab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f1e6b41-10", "ovs_interfaceid": "5f1e6b41-10f8-488b-93cf-7cb619b5e80d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1420.108313] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:66:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '713e54d5-283f-493d-b003-f13182deaf7b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5f1e6b41-10f8-488b-93cf-7cb619b5e80d', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1420.119199] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Creating folder: Project 
(be4d5bf76d544f878ee84f7202fba4ab). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1420.119199] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ad19da3-d5fe-460e-ae68-9091157671c1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.128690] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Created folder: Project (be4d5bf76d544f878ee84f7202fba4ab) in parent group-v368536. [ 1420.128914] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Creating folder: Instances. Parent ref: group-v368625. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1420.129168] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb103160-2543-4924-8ff6-7e12269ddf89 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.139572] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Created folder: Instances in parent group-v368625. [ 1420.139841] env[62508]: DEBUG oslo.service.loopingcall [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1420.140044] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1420.140267] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be460c52-6136-431b-be25-62e9901222b9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.161377] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1420.161377] env[62508]: value = "task-1775614" [ 1420.161377] env[62508]: _type = "Task" [ 1420.161377] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.170675] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775614, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.214512] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Releasing lock "refresh_cache-e652e59f-9432-41cf-b4a5-0f5cf649b24e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1420.215390] env[62508]: DEBUG nova.compute.manager [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1420.215599] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1420.216135] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7e4b8226-34f8-45c4-b1d7-4a542f42f6f9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.226894] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258c5a1b-26b3-4601-8cb2-59b07cbed4dd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.263824] env[62508]: WARNING nova.virt.vmwareapi.vmops [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e652e59f-9432-41cf-b4a5-0f5cf649b24e could not be found. [ 1420.264127] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1420.264383] env[62508]: INFO nova.compute.manager [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1420.264709] env[62508]: DEBUG oslo.service.loopingcall [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1420.265015] env[62508]: DEBUG nova.compute.manager [-] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1420.265146] env[62508]: DEBUG nova.network.neutron [-] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1420.301838] env[62508]: DEBUG nova.network.neutron [-] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1420.381709] env[62508]: DEBUG nova.compute.manager [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1420.402461] env[62508]: DEBUG nova.scheduler.client.report [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1420.424194] env[62508]: DEBUG nova.virt.hardware [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1420.424475] env[62508]: DEBUG nova.virt.hardware [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1420.424636] env[62508]: DEBUG nova.virt.hardware [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Image limits 0:0:0 {{(pid=62508) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1420.424826] env[62508]: DEBUG nova.virt.hardware [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1420.425017] env[62508]: DEBUG nova.virt.hardware [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1420.427843] env[62508]: DEBUG nova.virt.hardware [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1420.428196] env[62508]: DEBUG nova.virt.hardware [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1420.428464] env[62508]: DEBUG nova.virt.hardware [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1420.428685] env[62508]: DEBUG nova.virt.hardware [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1420.429032] env[62508]: DEBUG nova.virt.hardware [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1420.429588] env[62508]: DEBUG nova.virt.hardware [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1420.430606] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3453ce1a-6dff-4447-972a-f41f9bd4000a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.440422] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f53a48-d569-4cf0-b1a0-8293e18dfcc0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.486674] env[62508]: DEBUG nova.compute.manager [req-f8a56a38-8638-465a-a96b-9abb7777b5ef req-4fa91f46-77cb-4456-a518-af340e7197d9 service nova] [instance: 
fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Received event network-changed-5f1e6b41-10f8-488b-93cf-7cb619b5e80d {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1420.486913] env[62508]: DEBUG nova.compute.manager [req-f8a56a38-8638-465a-a96b-9abb7777b5ef req-4fa91f46-77cb-4456-a518-af340e7197d9 service nova] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Refreshing instance network info cache due to event network-changed-5f1e6b41-10f8-488b-93cf-7cb619b5e80d. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1420.487163] env[62508]: DEBUG oslo_concurrency.lockutils [req-f8a56a38-8638-465a-a96b-9abb7777b5ef req-4fa91f46-77cb-4456-a518-af340e7197d9 service nova] Acquiring lock "refresh_cache-fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1420.487313] env[62508]: DEBUG oslo_concurrency.lockutils [req-f8a56a38-8638-465a-a96b-9abb7777b5ef req-4fa91f46-77cb-4456-a518-af340e7197d9 service nova] Acquired lock "refresh_cache-fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.487474] env[62508]: DEBUG nova.network.neutron [req-f8a56a38-8638-465a-a96b-9abb7777b5ef req-4fa91f46-77cb-4456-a518-af340e7197d9 service nova] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Refreshing network info cache for port 5f1e6b41-10f8-488b-93cf-7cb619b5e80d {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1420.674346] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775614, 'name': CreateVM_Task, 'duration_secs': 0.499596} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.675294] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1420.675637] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1420.675875] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.676252] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1420.676606] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ace88b0-be7d-4dde-b5e6-454befadb4a1 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.682017] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for the task: (returnval){ [ 1420.682017] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527e8f52-ab11-8690-497f-32e67b8ad37d" [ 1420.682017] env[62508]: _type = "Task" [ 1420.682017] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.690494] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527e8f52-ab11-8690-497f-32e67b8ad37d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.806310] env[62508]: DEBUG nova.network.neutron [-] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1420.909487] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.561s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1420.909951] env[62508]: DEBUG nova.compute.manager [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1420.912990] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.390s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.913163] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1420.916054] env[62508]: DEBUG oslo_concurrency.lockutils [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.669s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.916054] env[62508]: DEBUG oslo_concurrency.lockutils [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1420.917692] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.575s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.919081] env[62508]: INFO nova.compute.claims [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1420.946710] env[62508]: INFO nova.scheduler.client.report [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Deleted allocations for instance 192995e7-82f5-41be-990d-d91b93f981e1 [ 1420.958231] env[62508]: INFO nova.scheduler.client.report [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Deleted allocations for instance 03552483-a365-4d25-94bc-ea9b38ee6cd6 [ 1421.090353] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquiring lock "ee99ff4d-9996-4cfa-b038-7b19aef27438" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" 
{{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.090715] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "ee99ff4d-9996-4cfa-b038-7b19aef27438" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.091017] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquiring lock "ee99ff4d-9996-4cfa-b038-7b19aef27438-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.091273] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "ee99ff4d-9996-4cfa-b038-7b19aef27438-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.091562] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "ee99ff4d-9996-4cfa-b038-7b19aef27438-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.096298] env[62508]: INFO nova.compute.manager [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Terminating instance [ 1421.098395] env[62508]: DEBUG nova.compute.manager [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1421.098761] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1421.100533] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4925482e-6550-4494-a46b-71651278aacd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.109821] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1421.110111] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ddc8b57-72a1-4b26-ac6d-0f283bb5931a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.116824] env[62508]: DEBUG oslo_vmware.api [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for the task: (returnval){ [ 1421.116824] env[62508]: value = "task-1775615" [ 1421.116824] env[62508]: _type = "Task" [ 1421.116824] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.125509] env[62508]: DEBUG oslo_vmware.api [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775615, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.195646] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527e8f52-ab11-8690-497f-32e67b8ad37d, 'name': SearchDatastore_Task, 'duration_secs': 0.065593} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.195646] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1421.196394] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1421.196498] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1421.196800] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1421.197574] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1421.198698] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1856469-4414-424e-a21e-8263a6a9a71e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.208779] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1421.209069] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1421.209877] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6534d561-d06d-4d91-900e-a90299cf3a55 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.218030] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for the task: (returnval){ [ 1421.218030] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d15abb-bee8-6d92-85d7-fe38f7229c11" [ 1421.218030] env[62508]: _type = "Task" [ 1421.218030] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.218437] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1421.221887] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1421.228078] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d15abb-bee8-6d92-85d7-fe38f7229c11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.256797] env[62508]: DEBUG nova.network.neutron [req-f8a56a38-8638-465a-a96b-9abb7777b5ef req-4fa91f46-77cb-4456-a518-af340e7197d9 service nova] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Updated VIF entry in instance network info cache for port 5f1e6b41-10f8-488b-93cf-7cb619b5e80d. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1421.257152] env[62508]: DEBUG nova.network.neutron [req-f8a56a38-8638-465a-a96b-9abb7777b5ef req-4fa91f46-77cb-4456-a518-af340e7197d9 service nova] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Updating instance_info_cache with network_info: [{"id": "5f1e6b41-10f8-488b-93cf-7cb619b5e80d", "address": "fa:16:3e:97:66:81", "network": {"id": "50e473c5-0765-4fbb-9942-a8c772e62a71", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-127017859-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be4d5bf76d544f878ee84f7202fba4ab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f1e6b41-10", "ovs_interfaceid": "5f1e6b41-10f8-488b-93cf-7cb619b5e80d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.308963] env[62508]: INFO nova.compute.manager [-] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Took 1.04 seconds to deallocate network for instance. [ 1421.424193] env[62508]: DEBUG nova.compute.utils [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1421.427812] env[62508]: DEBUG nova.compute.manager [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1421.428061] env[62508]: DEBUG nova.network.neutron [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1421.458024] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9a5a71d7-912d-498c-8fcc-3e3e0b08c636 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "192995e7-82f5-41be-990d-d91b93f981e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.344s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.467305] env[62508]: DEBUG oslo_concurrency.lockutils [None req-31631713-e4cf-49e8-82c0-f2b92a3e0618 tempest-ListServersNegativeTestJSON-1383705692 tempest-ListServersNegativeTestJSON-1383705692-project-member] Lock "03552483-a365-4d25-94bc-ea9b38ee6cd6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.039s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.507187] env[62508]: DEBUG nova.policy [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e23c4d8844d4273a264b0dcc148d251', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '27e6f55b56be40d2a619f0119aefb2ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1421.633376] env[62508]: DEBUG oslo_vmware.api [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775615, 'name': PowerOffVM_Task, 'duration_secs': 0.27249} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.633376] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1421.633376] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1421.635313] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ba3de05-b298-4401-8dcd-47141afe10c6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.734737] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d15abb-bee8-6d92-85d7-fe38f7229c11, 'name': SearchDatastore_Task, 'duration_secs': 0.01247} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.736266] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5464ad1d-b9ca-47dc-86ed-0c0ae990e5c3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.740275] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1421.740996] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1421.742604] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Rebuilding the list of instances to heal {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1421.752078] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for the task: (returnval){ [ 1421.752078] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520be949-c70b-dcd9-6076-cab3aee3ca3f" [ 1421.752078] env[62508]: _type = "Task" [ 1421.752078] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.761902] env[62508]: DEBUG oslo_concurrency.lockutils [req-f8a56a38-8638-465a-a96b-9abb7777b5ef req-4fa91f46-77cb-4456-a518-af340e7197d9 service nova] Releasing lock "refresh_cache-fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1421.762027] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520be949-c70b-dcd9-6076-cab3aee3ca3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.816165] env[62508]: INFO nova.compute.manager [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Instance disappeared during terminate [ 1421.816422] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3a173a16-1c06-48ee-b52f-533b7a306d61 tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "e652e59f-9432-41cf-b4a5-0f5cf649b24e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.892s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.931866] env[62508]: DEBUG nova.compute.manager [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1421.987862] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1421.988091] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1421.988275] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Deleting the datastore file [datastore1] ee99ff4d-9996-4cfa-b038-7b19aef27438 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1421.988530] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bdad8ee7-4377-4cb3-9112-36e2f58511c3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.995563] env[62508]: DEBUG oslo_vmware.api [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for the task: (returnval){ [ 1421.995563] env[62508]: value = "task-1775617" [ 1421.995563] env[62508]: _type = "Task" [ 1421.995563] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.004928] env[62508]: DEBUG oslo_vmware.api [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775617, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.042123] env[62508]: DEBUG nova.network.neutron [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Successfully created port: 985905ec-2a79-4b7a-b4ad-d3bf00a42f43 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1422.145827] env[62508]: DEBUG nova.network.neutron [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Successfully updated port: ec6adf65-fbdf-4276-8e19-eb416336bbff {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1422.253591] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Skipping network cache update for instance because it is being deleted. 
{{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9946}} [ 1422.253833] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Skipping network cache update for instance because it is Building. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1422.253949] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Skipping network cache update for instance because it is Building. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1422.254164] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Skipping network cache update for instance because it is Building. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1422.254255] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Skipping network cache update for instance because it is Building. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1422.272694] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520be949-c70b-dcd9-6076-cab3aee3ca3f, 'name': SearchDatastore_Task, 'duration_secs': 0.227254} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.272694] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1422.272782] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8/fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1422.273961] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf5a99b2-fdac-4761-a54a-7345a7ad58b8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.280936] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for the task: (returnval){ [ 1422.280936] env[62508]: value = "task-1775618" [ 1422.280936] env[62508]: _type = "Task" [ 1422.280936] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.293265] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775618, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.329877] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "refresh_cache-868cf942-f348-488d-b00a-af4c8b5efda5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1422.330037] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquired lock "refresh_cache-868cf942-f348-488d-b00a-af4c8b5efda5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.330326] env[62508]: DEBUG nova.network.neutron [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Forcefully refreshing network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1422.330381] env[62508]: DEBUG nova.objects.instance [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lazy-loading 'info_cache' on Instance uuid 868cf942-f348-488d-b00a-af4c8b5efda5 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1422.462561] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c26da8-991a-4c12-826b-62b13cfd94ee {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.476680] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd62872-ec20-4a26-b934-a38e13e6508c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.519424] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2108c57d-541f-4957-9c63-38e9183a24a6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.528184] env[62508]: DEBUG oslo_vmware.api [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Task: {'id': task-1775617, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130375} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.530734] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1422.530938] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1422.531144] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1422.531389] env[62508]: INFO nova.compute.manager [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Took 1.43 seconds to destroy the instance on the hypervisor. [ 1422.531595] env[62508]: DEBUG oslo.service.loopingcall [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1422.532076] env[62508]: DEBUG nova.compute.manager [-] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1422.532076] env[62508]: DEBUG nova.network.neutron [-] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1422.534881] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa6d3ab-689b-4690-8d0a-c2da0daa47d8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.551210] env[62508]: DEBUG nova.compute.provider_tree [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1422.589932] env[62508]: DEBUG nova.compute.manager [req-793d1985-11d9-4b02-a168-6efdef164dc2 req-990b62c6-0088-4bd6-88b1-ad3bf3fd3917 service nova] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Received event network-vif-plugged-ec6adf65-fbdf-4276-8e19-eb416336bbff {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1422.590251] env[62508]: DEBUG oslo_concurrency.lockutils [req-793d1985-11d9-4b02-a168-6efdef164dc2 req-990b62c6-0088-4bd6-88b1-ad3bf3fd3917 service nova] Acquiring lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.590478] env[62508]: DEBUG oslo_concurrency.lockutils [req-793d1985-11d9-4b02-a168-6efdef164dc2 req-990b62c6-0088-4bd6-88b1-ad3bf3fd3917 service nova] Lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1422.590653] env[62508]: DEBUG oslo_concurrency.lockutils [req-793d1985-11d9-4b02-a168-6efdef164dc2 req-990b62c6-0088-4bd6-88b1-ad3bf3fd3917 service nova] Lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.590826] env[62508]: DEBUG nova.compute.manager [req-793d1985-11d9-4b02-a168-6efdef164dc2 req-990b62c6-0088-4bd6-88b1-ad3bf3fd3917 service nova] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] No waiting events found dispatching network-vif-plugged-ec6adf65-fbdf-4276-8e19-eb416336bbff {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1422.591622] env[62508]: WARNING nova.compute.manager [req-793d1985-11d9-4b02-a168-6efdef164dc2 req-990b62c6-0088-4bd6-88b1-ad3bf3fd3917 service nova] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Received unexpected event network-vif-plugged-ec6adf65-fbdf-4276-8e19-eb416336bbff for instance with vm_state building and task_state spawning. 
[ 1422.591622] env[62508]: DEBUG nova.compute.manager [req-793d1985-11d9-4b02-a168-6efdef164dc2 req-990b62c6-0088-4bd6-88b1-ad3bf3fd3917 service nova] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Received event network-changed-ec6adf65-fbdf-4276-8e19-eb416336bbff {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1422.591622] env[62508]: DEBUG nova.compute.manager [req-793d1985-11d9-4b02-a168-6efdef164dc2 req-990b62c6-0088-4bd6-88b1-ad3bf3fd3917 service nova] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Refreshing instance network info cache due to event network-changed-ec6adf65-fbdf-4276-8e19-eb416336bbff. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1422.591877] env[62508]: DEBUG oslo_concurrency.lockutils [req-793d1985-11d9-4b02-a168-6efdef164dc2 req-990b62c6-0088-4bd6-88b1-ad3bf3fd3917 service nova] Acquiring lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1422.592860] env[62508]: DEBUG oslo_concurrency.lockutils [req-793d1985-11d9-4b02-a168-6efdef164dc2 req-990b62c6-0088-4bd6-88b1-ad3bf3fd3917 service nova] Acquired lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.592860] env[62508]: DEBUG nova.network.neutron [req-793d1985-11d9-4b02-a168-6efdef164dc2 req-990b62c6-0088-4bd6-88b1-ad3bf3fd3917 service nova] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Refreshing network info cache for port ec6adf65-fbdf-4276-8e19-eb416336bbff {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1422.655041] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1422.791914] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775618, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484434} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.792414] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8/fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1422.792836] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1422.793229] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-13f5e626-0093-4baa-81c5-5226d1bc3bdd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.799544] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for the task: (returnval){ [ 1422.799544] env[62508]: value = "task-1775619" [ 1422.799544] env[62508]: _type = "Task" [ 1422.799544] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.813998] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775619, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.946070] env[62508]: DEBUG nova.compute.manager [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1422.983502] env[62508]: DEBUG nova.virt.hardware [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1422.983778] env[62508]: DEBUG nova.virt.hardware [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1422.983933] env[62508]: DEBUG nova.virt.hardware [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1422.984141] env[62508]: DEBUG nova.virt.hardware [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1422.984286] env[62508]: DEBUG nova.virt.hardware [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1422.984429] env[62508]: DEBUG nova.virt.hardware [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1422.984652] env[62508]: DEBUG nova.virt.hardware [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1422.984784] env[62508]: DEBUG nova.virt.hardware [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1422.984995] env[62508]: DEBUG nova.virt.hardware [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1422.985218] env[62508]: DEBUG nova.virt.hardware [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1422.985412] env[62508]: DEBUG nova.virt.hardware [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1422.986426] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa782ca-31be-4626-96d9-89b5b6c7a504 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.995689] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81060400-defd-4b7c-81f4-1630a89621dd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.054878] env[62508]: DEBUG nova.scheduler.client.report [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1423.141311] env[62508]: DEBUG nova.network.neutron [req-793d1985-11d9-4b02-a168-6efdef164dc2 req-990b62c6-0088-4bd6-88b1-ad3bf3fd3917 service nova] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1423.289122] env[62508]: DEBUG nova.network.neutron [req-793d1985-11d9-4b02-a168-6efdef164dc2 req-990b62c6-0088-4bd6-88b1-ad3bf3fd3917 service nova] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1423.323408] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775619, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123665} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.323408] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1423.323408] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c6ce17-9a93-4acf-9f7e-4891341a5eb7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.349703] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8/fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1423.350267] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9255a657-4074-4430-96bd-9eaea6f371ce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.375323] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for the task: (returnval){ [ 1423.375323] env[62508]: value = "task-1775620" [ 1423.375323] env[62508]: _type = "Task" [ 1423.375323] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.382794] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775620, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.541412] env[62508]: DEBUG nova.network.neutron [-] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1423.561338] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.643s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1423.561338] env[62508]: DEBUG nova.compute.manager [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1423.564391] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.627s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.564512] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1423.570268] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.284s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.570268] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1423.570268] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.133s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.571353] env[62508]: INFO nova.compute.claims [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1423.604481] env[62508]: INFO nova.scheduler.client.report [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Deleted allocations for instance ce74cbd8-b709-418b-a206-f51975fd0af1 [ 1423.612974] env[62508]: INFO nova.scheduler.client.report [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Deleted allocations for instance 2f7b7109-0ced-4ea4-8dde-608655f2b3ab [ 1423.793478] env[62508]: DEBUG oslo_concurrency.lockutils [req-793d1985-11d9-4b02-a168-6efdef164dc2 req-990b62c6-0088-4bd6-88b1-ad3bf3fd3917 service nova] Releasing lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1423.793478] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 
tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1423.793600] env[62508]: DEBUG nova.network.neutron [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1423.884599] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775620, 'name': ReconfigVM_Task, 'duration_secs': 0.311509} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.884954] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Reconfigured VM instance instance-00000021 to attach disk [datastore1] fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8/fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1423.885671] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4d6b0a01-3d94-48ff-a210-5978f6f4b995 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.894150] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for the task: (returnval){ [ 1423.894150] env[62508]: value = "task-1775621" [ 1423.894150] env[62508]: _type = "Task" [ 1423.894150] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.908292] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775621, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.045770] env[62508]: INFO nova.compute.manager [-] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Took 1.51 seconds to deallocate network for instance. [ 1424.079438] env[62508]: DEBUG nova.compute.utils [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1424.081942] env[62508]: DEBUG nova.compute.manager [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1424.081942] env[62508]: DEBUG nova.network.neutron [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1424.122475] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d2a5166-e1b0-42f2-84d0-a2c046b3dc84 tempest-DeleteServersAdminTestJSON-1477972896 tempest-DeleteServersAdminTestJSON-1477972896-project-member] Lock "ce74cbd8-b709-418b-a206-f51975fd0af1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.562s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1424.124142] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8d0f1da7-9e49-4999-a0d3-df6209f935f1 tempest-InstanceActionsTestJSON-45223271 tempest-InstanceActionsTestJSON-45223271-project-member] Lock "2f7b7109-0ced-4ea4-8dde-608655f2b3ab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.908s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1424.177677] env[62508]: DEBUG nova.policy [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e23c4d8844d4273a264b0dcc148d251', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '27e6f55b56be40d2a619f0119aefb2ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1424.298982] env[62508]: DEBUG nova.network.neutron [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Updating instance_info_cache with network_info: [{"id": "f7f2eb4c-dab6-44e0-8f5e-7013ddd13683", "address": "fa:16:3e:1f:3b:14", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7f2eb4c-da", "ovs_interfaceid": "f7f2eb4c-dab6-44e0-8f5e-7013ddd13683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1424.356810] env[62508]: DEBUG nova.network.neutron [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1424.383332] env[62508]: DEBUG nova.network.neutron [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Successfully updated port: 985905ec-2a79-4b7a-b4ad-d3bf00a42f43 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1424.406875] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775621, 'name': Rename_Task, 'duration_secs': 0.175158} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.407285] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1424.407935] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c351617d-74d6-42ff-b24b-8c871f6e9aec {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.414788] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for the task: (returnval){ [ 1424.414788] env[62508]: value = "task-1775622" [ 1424.414788] env[62508]: _type = "Task" [ 1424.414788] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.430392] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775622, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.551976] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.568490] env[62508]: DEBUG nova.network.neutron [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Updating instance_info_cache with network_info: [{"id": "ec6adf65-fbdf-4276-8e19-eb416336bbff", "address": "fa:16:3e:82:3c:01", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec6adf65-fb", "ovs_interfaceid": "ec6adf65-fbdf-4276-8e19-eb416336bbff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1424.586594] env[62508]: DEBUG nova.compute.manager [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1424.671921] env[62508]: DEBUG nova.compute.manager [req-5e4ff73d-17e1-4dc3-99a4-c7d489ad944d req-2f5004f3-b758-4162-9f7d-a804a18a0885 service nova] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Received event network-vif-deleted-987ac3c1-9f91-4672-9ca9-339fd8ad1dfd {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1424.672170] env[62508]: DEBUG nova.compute.manager [req-5e4ff73d-17e1-4dc3-99a4-c7d489ad944d req-2f5004f3-b758-4162-9f7d-a804a18a0885 service nova] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Received event network-vif-plugged-985905ec-2a79-4b7a-b4ad-d3bf00a42f43 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1424.672362] env[62508]: DEBUG oslo_concurrency.lockutils [req-5e4ff73d-17e1-4dc3-99a4-c7d489ad944d req-2f5004f3-b758-4162-9f7d-a804a18a0885 service nova] Acquiring lock "7015b188-17ca-45ec-8fe8-f80ef0f9cb0a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.672700] env[62508]: DEBUG oslo_concurrency.lockutils [req-5e4ff73d-17e1-4dc3-99a4-c7d489ad944d req-2f5004f3-b758-4162-9f7d-a804a18a0885 service nova] Lock "7015b188-17ca-45ec-8fe8-f80ef0f9cb0a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.672816] env[62508]: DEBUG oslo_concurrency.lockutils [req-5e4ff73d-17e1-4dc3-99a4-c7d489ad944d req-2f5004f3-b758-4162-9f7d-a804a18a0885 service nova] Lock "7015b188-17ca-45ec-8fe8-f80ef0f9cb0a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1424.672965] env[62508]: DEBUG nova.compute.manager [req-5e4ff73d-17e1-4dc3-99a4-c7d489ad944d req-2f5004f3-b758-4162-9f7d-a804a18a0885 service nova] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] No waiting events found dispatching network-vif-plugged-985905ec-2a79-4b7a-b4ad-d3bf00a42f43 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1424.673145] env[62508]: WARNING nova.compute.manager [req-5e4ff73d-17e1-4dc3-99a4-c7d489ad944d req-2f5004f3-b758-4162-9f7d-a804a18a0885 service nova] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Received unexpected event network-vif-plugged-985905ec-2a79-4b7a-b4ad-d3bf00a42f43 for instance with vm_state building and task_state spawning. [ 1424.673300] env[62508]: DEBUG nova.compute.manager [req-5e4ff73d-17e1-4dc3-99a4-c7d489ad944d req-2f5004f3-b758-4162-9f7d-a804a18a0885 service nova] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Received event network-changed-985905ec-2a79-4b7a-b4ad-d3bf00a42f43 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1424.673451] env[62508]: DEBUG nova.compute.manager [req-5e4ff73d-17e1-4dc3-99a4-c7d489ad944d req-2f5004f3-b758-4162-9f7d-a804a18a0885 service nova] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Refreshing instance network info cache due to event network-changed-985905ec-2a79-4b7a-b4ad-d3bf00a42f43. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1424.673630] env[62508]: DEBUG oslo_concurrency.lockutils [req-5e4ff73d-17e1-4dc3-99a4-c7d489ad944d req-2f5004f3-b758-4162-9f7d-a804a18a0885 service nova] Acquiring lock "refresh_cache-7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.673762] env[62508]: DEBUG oslo_concurrency.lockutils [req-5e4ff73d-17e1-4dc3-99a4-c7d489ad944d req-2f5004f3-b758-4162-9f7d-a804a18a0885 service nova] Acquired lock "refresh_cache-7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.673983] env[62508]: DEBUG nova.network.neutron [req-5e4ff73d-17e1-4dc3-99a4-c7d489ad944d req-2f5004f3-b758-4162-9f7d-a804a18a0885 service nova] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Refreshing network info cache for port 985905ec-2a79-4b7a-b4ad-d3bf00a42f43 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1424.804305] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Releasing lock "refresh_cache-868cf942-f348-488d-b00a-af4c8b5efda5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1424.804593] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Updated the network info_cache for instance {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1424.804843] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1424.805301] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1424.805602] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1424.805855] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1424.806268] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1424.806526] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1424.806735] env[62508]: DEBUG nova.compute.manager [None 
req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1424.807045] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1424.896376] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "refresh_cache-7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.936262] env[62508]: DEBUG oslo_vmware.api [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775622, 'name': PowerOnVM_Task, 'duration_secs': 0.491663} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.936262] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1424.936262] env[62508]: INFO nova.compute.manager [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Took 7.13 seconds to spawn the instance on the hypervisor. 
[ 1424.936262] env[62508]: DEBUG nova.compute.manager [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1424.936992] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f094320-fe1b-49db-8900-340175cd6b89 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.965885] env[62508]: DEBUG nova.network.neutron [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Successfully created port: f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1425.062444] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c88b5c3-3d0d-4b31-a2d4-264cf0d67096 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.071118] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8228b14d-c98c-4ff4-b2fc-74aa75938358 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.075215] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1425.075513] env[62508]: DEBUG nova.compute.manager [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Instance network_info: |[{"id": "ec6adf65-fbdf-4276-8e19-eb416336bbff", "address": "fa:16:3e:82:3c:01", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec6adf65-fb", "ovs_interfaceid": "ec6adf65-fbdf-4276-8e19-eb416336bbff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1425.075915] env[62508]: DEBUG 
nova.virt.vmwareapi.vmops [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:3c:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35342bcb-8b06-472e-b3c0-43fd3d6c4b30', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec6adf65-fbdf-4276-8e19-eb416336bbff', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1425.084783] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Creating folder: Project (ce113e91e2b74136a8050ed3acf3557c). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1425.085521] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d7c7fa1-dcf5-47f1-a4e8-4221d089a804 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.121840] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d3d052-b2cf-4706-ab55-ab3d81b28680 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.124523] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Created folder: Project (ce113e91e2b74136a8050ed3acf3557c) in parent group-v368536. [ 1425.124833] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Creating folder: Instances. Parent ref: group-v368628. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1425.124926] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-459affcb-fcd9-4677-a492-83a76f49b022 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.132948] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315b3b8d-8dc8-458f-b200-5b6dae9ba80d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.137964] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Created folder: Instances in parent group-v368628. [ 1425.138216] env[62508]: DEBUG oslo.service.loopingcall [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1425.138734] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1425.138949] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b39fd3af-7de2-43cd-a914-b245111cedae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.166255] env[62508]: DEBUG nova.compute.provider_tree [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1425.173331] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1425.173331] env[62508]: value = "task-1775625" [ 1425.173331] env[62508]: _type = "Task" [ 1425.173331] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.186575] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775625, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.239297] env[62508]: DEBUG nova.network.neutron [req-5e4ff73d-17e1-4dc3-99a4-c7d489ad944d req-2f5004f3-b758-4162-9f7d-a804a18a0885 service nova] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1425.312708] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.456803] env[62508]: INFO nova.compute.manager [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Took 45.35 seconds to build instance. [ 1425.622728] env[62508]: DEBUG nova.compute.manager [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1425.647369] env[62508]: DEBUG nova.virt.hardware [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1425.647625] env[62508]: DEBUG nova.virt.hardware [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1425.647782] env[62508]: DEBUG nova.virt.hardware [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1425.648287] env[62508]: DEBUG nova.virt.hardware [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1425.648287] env[62508]: DEBUG nova.virt.hardware [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1425.648287] env[62508]: DEBUG nova.virt.hardware [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1425.648463] env[62508]: DEBUG nova.virt.hardware [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1425.648592] env[62508]: DEBUG nova.virt.hardware [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1425.648810] env[62508]: DEBUG nova.virt.hardware [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1425.649023] env[62508]: DEBUG nova.virt.hardware [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1425.649209] env[62508]: DEBUG nova.virt.hardware [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1425.650084] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad40ee3e-ec16-49e7-aa97-cfc703a3e035 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.658682] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-638aa397-cf22-4a9f-8149-50a1a117686e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.677791] env[62508]: DEBUG nova.scheduler.client.report [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1425.690634] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775625, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.725800] env[62508]: DEBUG nova.network.neutron [req-5e4ff73d-17e1-4dc3-99a4-c7d489ad944d req-2f5004f3-b758-4162-9f7d-a804a18a0885 service nova] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.959944] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5b159a13-9326-43a1-a394-3ef7552dadb7 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Lock "fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.424s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.186032] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.614s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.186032] env[62508]: DEBUG nova.compute.manager [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1426.201020] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.456s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.201020] env[62508]: INFO nova.compute.claims [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1426.213875] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775625, 'name': CreateVM_Task, 'duration_secs': 0.705873} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.216469] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1426.216469] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1426.216469] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.216984] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1426.217325] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31b14f77-31ef-4017-a08d-578e3a64b59c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.223677] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1426.223677] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52eac0fe-2891-e5c2-b6fe-ee96826c1a1a" [ 1426.223677] env[62508]: _type = "Task" [ 1426.223677] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.229911] env[62508]: DEBUG oslo_concurrency.lockutils [req-5e4ff73d-17e1-4dc3-99a4-c7d489ad944d req-2f5004f3-b758-4162-9f7d-a804a18a0885 service nova] Releasing lock "refresh_cache-7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1426.233863] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquired lock "refresh_cache-7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.234143] env[62508]: DEBUG nova.network.neutron [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1426.236397] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52eac0fe-2891-e5c2-b6fe-ee96826c1a1a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.464138] env[62508]: DEBUG nova.compute.manager [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1426.708226] env[62508]: DEBUG nova.compute.utils [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1426.714549] env[62508]: DEBUG nova.compute.manager [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1426.714876] env[62508]: DEBUG nova.network.neutron [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1426.735985] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52eac0fe-2891-e5c2-b6fe-ee96826c1a1a, 'name': SearchDatastore_Task, 'duration_secs': 0.015362} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.738782] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1426.738782] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1426.738782] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1426.738945] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.740497] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1426.740497] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-24d92314-51a8-4bb1-93af-26a606e8886a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.753566] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1426.753768] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1426.759147] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e647111-8d7c-4247-840d-b0cad3633b4c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.765322] env[62508]: DEBUG nova.compute.manager [req-f058d77f-812c-4424-b0a6-661f7d088faf req-ee1b66cf-753d-4edd-a07c-4e70040459d8 service nova] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Received event network-vif-plugged-f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1426.765546] env[62508]: DEBUG oslo_concurrency.lockutils [req-f058d77f-812c-4424-b0a6-661f7d088faf req-ee1b66cf-753d-4edd-a07c-4e70040459d8 service nova] Acquiring lock "a10a4217-ae46-4f00-9ba1-cdf74f44ec7b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.765760] env[62508]: DEBUG oslo_concurrency.lockutils [req-f058d77f-812c-4424-b0a6-661f7d088faf req-ee1b66cf-753d-4edd-a07c-4e70040459d8 service nova] Lock "a10a4217-ae46-4f00-9ba1-cdf74f44ec7b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.766061] env[62508]: DEBUG oslo_concurrency.lockutils [req-f058d77f-812c-4424-b0a6-661f7d088faf req-ee1b66cf-753d-4edd-a07c-4e70040459d8 service nova] Lock "a10a4217-ae46-4f00-9ba1-cdf74f44ec7b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.766125] env[62508]: DEBUG nova.compute.manager [req-f058d77f-812c-4424-b0a6-661f7d088faf req-ee1b66cf-753d-4edd-a07c-4e70040459d8 service nova] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] No waiting events found dispatching network-vif-plugged-f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1426.766274] env[62508]: WARNING nova.compute.manager [req-f058d77f-812c-4424-b0a6-661f7d088faf req-ee1b66cf-753d-4edd-a07c-4e70040459d8 service nova] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Received unexpected event network-vif-plugged-f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63 for instance with vm_state building and task_state spawning. [ 1426.771804] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1426.771804] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52393bc2-deec-509e-0ff7-d9767d3fd9ab" [ 1426.771804] env[62508]: _type = "Task" [ 1426.771804] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.785100] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52393bc2-deec-509e-0ff7-d9767d3fd9ab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.812038] env[62508]: DEBUG nova.network.neutron [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1426.827645] env[62508]: DEBUG nova.policy [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '596ff35abb3949e9b3d3d9b80e6eae69', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '15a9d6b8eb4e44a7a3d7fa4abe0cd5bb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1426.922809] env[62508]: DEBUG nova.network.neutron [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Successfully updated port: f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1427.002449] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1427.181628] env[62508]: DEBUG nova.network.neutron [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Updating instance_info_cache with network_info: [{"id": "985905ec-2a79-4b7a-b4ad-d3bf00a42f43", "address": "fa:16:3e:fa:b1:10", "network": {"id": "1469693f-972e-4bc4-8302-f159fa7e79b8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1766475744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27e6f55b56be40d2a619f0119aefb2ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap985905ec-2a", "ovs_interfaceid": "985905ec-2a79-4b7a-b4ad-d3bf00a42f43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.221547] env[62508]: DEBUG nova.compute.manager [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1427.289314] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52393bc2-deec-509e-0ff7-d9767d3fd9ab, 'name': SearchDatastore_Task, 'duration_secs': 0.025736} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.290178] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb182415-773d-46ef-8653-321410802e92 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.301706] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1427.301706] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522bce00-3974-b8cc-27ab-4911336a84fa" [ 1427.301706] env[62508]: _type = "Task" [ 1427.301706] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.309338] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522bce00-3974-b8cc-27ab-4911336a84fa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.427422] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "refresh_cache-a10a4217-ae46-4f00-9ba1-cdf74f44ec7b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1427.427542] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquired lock "refresh_cache-a10a4217-ae46-4f00-9ba1-cdf74f44ec7b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1427.427695] env[62508]: DEBUG nova.network.neutron [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1427.457917] env[62508]: DEBUG nova.compute.manager [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1427.459033] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c10640-115f-4bb7-8cf7-e87022ce7aa7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.477972] env[62508]: DEBUG nova.network.neutron [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Successfully created port: 4efa45b1-9d69-4e50-980d-b8a62b229d03 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1427.682913] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Releasing lock "refresh_cache-7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1427.684193] env[62508]: DEBUG nova.compute.manager [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Instance network_info: |[{"id": "985905ec-2a79-4b7a-b4ad-d3bf00a42f43", "address": "fa:16:3e:fa:b1:10", "network": {"id": "1469693f-972e-4bc4-8302-f159fa7e79b8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1766475744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"27e6f55b56be40d2a619f0119aefb2ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap985905ec-2a", "ovs_interfaceid": "985905ec-2a79-4b7a-b4ad-d3bf00a42f43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1427.684193] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:b1:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40c947c4-f471-4d48-8e43-fee54198107e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '985905ec-2a79-4b7a-b4ad-d3bf00a42f43', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1427.692635] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Creating folder: Project (27e6f55b56be40d2a619f0119aefb2ae). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1427.694092] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b1ba8f26-789f-486e-864e-8f012e995ca1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.698125] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca385981-548f-4f71-8bc3-166e38213601 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.706316] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-545348ba-e98f-4f25-bcc2-f34fad2ebc23 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.711467] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Created folder: Project (27e6f55b56be40d2a619f0119aefb2ae) in parent group-v368536. [ 1427.711732] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Creating folder: Instances. Parent ref: group-v368631. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1427.712368] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fafeae05-952e-407a-ac69-fe3bc5184d6d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.750075] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d66b947-8121-4435-b1ea-e4f21e2e5bff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.754053] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Created folder: Instances in parent group-v368631. [ 1427.754053] env[62508]: DEBUG oslo.service.loopingcall [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1427.754053] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1427.754509] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-db109763-d8a7-440f-9668-33e2ebf257b8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.779313] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c44b63-f4b1-48d7-92e6-394aacaba37c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.786827] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1427.786827] env[62508]: value = "task-1775628" [ 1427.786827] env[62508]: _type = "Task" [ 1427.786827] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.806270] env[62508]: DEBUG nova.compute.provider_tree [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1427.821397] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775628, 'name': CreateVM_Task} progress is 15%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.827555] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522bce00-3974-b8cc-27ab-4911336a84fa, 'name': SearchDatastore_Task, 'duration_secs': 0.009753} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.827834] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1427.828152] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] e478855d-e9c7-4abc-8e22-a4b2eb0c7310/e478855d-e9c7-4abc-8e22-a4b2eb0c7310.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1427.828490] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2aaed998-170c-4fc8-a891-1bb3a2a9aa1a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.837082] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1427.837082] env[62508]: value = "task-1775629" [ 1427.837082] env[62508]: _type = "Task" [ 1427.837082] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.845847] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1775629, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.980476] env[62508]: INFO nova.compute.manager [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] instance snapshotting [ 1427.985019] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00868b98-2d2a-4868-aae2-5d33a3f2147b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.012595] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0836525-ccc0-4b89-ab67-f6cfd4bdda31 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.017649] env[62508]: DEBUG nova.network.neutron [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1428.258023] env[62508]: DEBUG nova.compute.manager [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1428.287329] env[62508]: DEBUG nova.virt.hardware [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1428.287769] env[62508]: DEBUG nova.virt.hardware [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1428.288400] env[62508]: DEBUG nova.virt.hardware [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1428.288821] env[62508]: DEBUG nova.virt.hardware [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1428.289090] env[62508]: DEBUG nova.virt.hardware [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1428.289318] env[62508]: DEBUG nova.virt.hardware [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1428.289647] env[62508]: DEBUG nova.virt.hardware [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1428.289885] env[62508]: DEBUG nova.virt.hardware [None req-6dc31604-3590-4bc5-a5ee-337155a90760 
tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1428.290148] env[62508]: DEBUG nova.virt.hardware [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1428.290362] env[62508]: DEBUG nova.virt.hardware [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1428.290575] env[62508]: DEBUG nova.virt.hardware [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1428.291615] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6cd1ebd-590a-404f-822e-9ffb2a640232 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.305580] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775628, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.310423] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eafca31-de93-43c8-bd41-16826edf34ff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.315722] env[62508]: DEBUG nova.scheduler.client.report [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1428.338512] env[62508]: DEBUG nova.network.neutron [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Updating instance_info_cache with network_info: [{"id": "f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63", "address": "fa:16:3e:f7:62:93", "network": {"id": "1469693f-972e-4bc4-8302-f159fa7e79b8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1766475744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27e6f55b56be40d2a619f0119aefb2ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8a2c0d6-ef", "ovs_interfaceid": "f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1428.353373] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1775629, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.532754] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Creating Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1428.533164] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2fec205f-06a7-4fcc-af12-aad637d13fbe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.542646] env[62508]: DEBUG oslo_vmware.api [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for the task: (returnval){ [ 1428.542646] env[62508]: value = "task-1775630" [ 1428.542646] env[62508]: _type = "Task" [ 1428.542646] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.550459] env[62508]: DEBUG oslo_vmware.api [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775630, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.803397] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775628, 'name': CreateVM_Task, 'duration_secs': 0.625747} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.803582] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1428.804905] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1428.805243] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1428.805519] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1428.805782] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08206339-9ceb-451e-a0e9-be5886223d7b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.810852] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1428.810852] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a8f93c-ad73-3215-2c59-3546b049868f" [ 1428.810852] env[62508]: _type = "Task" [ 1428.810852] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.822578] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.624s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.823078] env[62508]: DEBUG nova.compute.manager [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1428.825907] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a8f93c-ad73-3215-2c59-3546b049868f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.827515] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.634s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1428.827648] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.829552] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.204s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1428.833388] env[62508]: INFO nova.compute.claims [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1428.841824] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Releasing lock "refresh_cache-a10a4217-ae46-4f00-9ba1-cdf74f44ec7b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1428.842233] env[62508]: DEBUG nova.compute.manager [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Instance network_info: |[{"id": "f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63", "address": "fa:16:3e:f7:62:93", "network": {"id": "1469693f-972e-4bc4-8302-f159fa7e79b8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1766475744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27e6f55b56be40d2a619f0119aefb2ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8a2c0d6-ef", "ovs_interfaceid": "f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1428.849169] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:62:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40c947c4-f471-4d48-8e43-fee54198107e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1428.858610] env[62508]: DEBUG oslo.service.loopingcall [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1428.859907] env[62508]: DEBUG nova.compute.manager [req-11ff5a31-4883-4a77-9b5e-5877616040d4 req-e9d4a2e7-f0a7-4b9f-9ed4-47b61b2969dc service nova] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Received event network-changed-f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1428.860205] env[62508]: DEBUG nova.compute.manager [req-11ff5a31-4883-4a77-9b5e-5877616040d4 req-e9d4a2e7-f0a7-4b9f-9ed4-47b61b2969dc service nova] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Refreshing instance network info cache due to event network-changed-f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1428.860469] env[62508]: DEBUG oslo_concurrency.lockutils [req-11ff5a31-4883-4a77-9b5e-5877616040d4 req-e9d4a2e7-f0a7-4b9f-9ed4-47b61b2969dc service nova] Acquiring lock "refresh_cache-a10a4217-ae46-4f00-9ba1-cdf74f44ec7b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1428.860692] env[62508]: DEBUG oslo_concurrency.lockutils [req-11ff5a31-4883-4a77-9b5e-5877616040d4 req-e9d4a2e7-f0a7-4b9f-9ed4-47b61b2969dc service nova] Acquired lock "refresh_cache-a10a4217-ae46-4f00-9ba1-cdf74f44ec7b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1428.861805] env[62508]: DEBUG nova.network.neutron [req-11ff5a31-4883-4a77-9b5e-5877616040d4 req-e9d4a2e7-f0a7-4b9f-9ed4-47b61b2969dc service nova] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Refreshing network info cache for port f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1428.862370] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1428.865276] env[62508]: INFO nova.scheduler.client.report [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Deleted allocations for instance aae3b4a3-c954-4f73-bd12-9b19a675179c [ 1428.866017] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92d06168-3363-400b-aa11-4aaf7d0d6119 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.887604] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1775629, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.54099} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.889264] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] e478855d-e9c7-4abc-8e22-a4b2eb0c7310/e478855d-e9c7-4abc-8e22-a4b2eb0c7310.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1428.889484] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1428.889761] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a0b669ea-1687-4abe-b89a-30c64b77508f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.894603] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1428.894603] env[62508]: value = "task-1775631" [ 1428.894603] env[62508]: _type = "Task" [ 1428.894603] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.900122] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1428.900122] env[62508]: value = "task-1775632" [ 1428.900122] env[62508]: _type = "Task" [ 1428.900122] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.909165] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775631, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.913183] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1775632, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.052942] env[62508]: DEBUG oslo_vmware.api [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775630, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.324846] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a8f93c-ad73-3215-2c59-3546b049868f, 'name': SearchDatastore_Task, 'duration_secs': 0.008995} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.325176] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1429.325410] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1429.325686] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1429.326116] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1429.326350] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1429.326621] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93cdcbf2-2647-4f92-8f36-a505dd411bd0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.329987] env[62508]: DEBUG nova.compute.utils [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1429.334787] env[62508]: DEBUG nova.compute.manager [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1429.334787] env[62508]: DEBUG nova.network.neutron [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1429.340553] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1429.340728] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1429.341855] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b4a2fcf-e21d-447f-9964-1e52c718e00d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.348335] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1429.348335] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525fe8d1-09bf-3411-a606-cbf534fa32aa" [ 1429.348335] env[62508]: _type = "Task" [ 1429.348335] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.357825] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525fe8d1-09bf-3411-a606-cbf534fa32aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.393443] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63851d7d-173b-496f-8c53-dd00856bf119 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Lock "aae3b4a3-c954-4f73-bd12-9b19a675179c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.074s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.405048] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775631, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.416461] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1775632, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071318} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.416720] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1429.417532] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c56e98-6af4-4e84-963f-e2f1bfc0e0b1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.444422] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] e478855d-e9c7-4abc-8e22-a4b2eb0c7310/e478855d-e9c7-4abc-8e22-a4b2eb0c7310.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1429.445466] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ba9ec7d-5299-4bfd-a1b4-cc5bd42e8d80 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.467704] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1429.467704] env[62508]: value = "task-1775633" [ 1429.467704] env[62508]: _type = "Task" [ 1429.467704] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.476695] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1775633, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.533827] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.533827] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.553987] env[62508]: DEBUG oslo_vmware.api [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775630, 'name': CreateSnapshot_Task, 'duration_secs': 0.933896} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.554389] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Created Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1429.555454] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85371bc1-d027-40e2-b510-8676fdb1e5bd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.654020] env[62508]: DEBUG nova.network.neutron [req-11ff5a31-4883-4a77-9b5e-5877616040d4 req-e9d4a2e7-f0a7-4b9f-9ed4-47b61b2969dc service nova] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Updated VIF entry in instance network info cache for port f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1429.654020] env[62508]: DEBUG nova.network.neutron [req-11ff5a31-4883-4a77-9b5e-5877616040d4 req-e9d4a2e7-f0a7-4b9f-9ed4-47b61b2969dc service nova] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Updating instance_info_cache with network_info: [{"id": "f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63", "address": "fa:16:3e:f7:62:93", "network": {"id": "1469693f-972e-4bc4-8302-f159fa7e79b8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1766475744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27e6f55b56be40d2a619f0119aefb2ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8a2c0d6-ef", "ovs_interfaceid": "f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.686364] env[62508]: DEBUG nova.policy [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c1793957cc840d58a1b6f1f9b38b96b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b46df14344794f29a8b0c00408d18159', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1429.835857] env[62508]: DEBUG nova.compute.manager [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1429.859455] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525fe8d1-09bf-3411-a606-cbf534fa32aa, 'name': SearchDatastore_Task, 'duration_secs': 0.008835} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.860323] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1be9421e-42eb-41f1-95e7-8f42946363dc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.866138] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1429.866138] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52fecb64-20a4-3293-c2b6-a87731ba0eea" [ 1429.866138] env[62508]: _type = "Task" [ 1429.866138] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.873879] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52fecb64-20a4-3293-c2b6-a87731ba0eea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.903646] env[62508]: DEBUG nova.compute.manager [req-c97f8471-c7f8-4e63-a320-3ad419532e60 req-ed88f544-dc24-494e-aff4-dae2ccda9adb service nova] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Received event network-vif-plugged-4efa45b1-9d69-4e50-980d-b8a62b229d03 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1429.904490] env[62508]: DEBUG oslo_concurrency.lockutils [req-c97f8471-c7f8-4e63-a320-3ad419532e60 req-ed88f544-dc24-494e-aff4-dae2ccda9adb service nova] Acquiring lock "db74146d-abc3-4d48-be1b-6ad471794dbf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.904490] env[62508]: DEBUG oslo_concurrency.lockutils [req-c97f8471-c7f8-4e63-a320-3ad419532e60 req-ed88f544-dc24-494e-aff4-dae2ccda9adb service nova] Lock "db74146d-abc3-4d48-be1b-6ad471794dbf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.904490] env[62508]: DEBUG oslo_concurrency.lockutils [req-c97f8471-c7f8-4e63-a320-3ad419532e60 req-ed88f544-dc24-494e-aff4-dae2ccda9adb service nova] Lock "db74146d-abc3-4d48-be1b-6ad471794dbf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.904490] env[62508]: DEBUG nova.compute.manager [req-c97f8471-c7f8-4e63-a320-3ad419532e60 req-ed88f544-dc24-494e-aff4-dae2ccda9adb service nova] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] No waiting events found dispatching network-vif-plugged-4efa45b1-9d69-4e50-980d-b8a62b229d03 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1429.904689] env[62508]: WARNING nova.compute.manager [req-c97f8471-c7f8-4e63-a320-3ad419532e60 req-ed88f544-dc24-494e-aff4-dae2ccda9adb service nova] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Received unexpected event 
network-vif-plugged-4efa45b1-9d69-4e50-980d-b8a62b229d03 for instance with vm_state building and task_state spawning. [ 1429.908722] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775631, 'name': CreateVM_Task, 'duration_secs': 0.667515} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.911889] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1429.913656] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1429.914143] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1429.917506] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1429.918587] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33f22be5-11b2-4980-ae61-00aeb677e01f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.925330] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1429.925330] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5254889f-1c6e-bd56-ad0f-1e0d427a5e15" [ 1429.925330] env[62508]: _type = "Task" [ 1429.925330] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.941550] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5254889f-1c6e-bd56-ad0f-1e0d427a5e15, 'name': SearchDatastore_Task, 'duration_secs': 0.008278} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.941875] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1429.942477] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1429.942477] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1429.981084] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1775633, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.075644] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Creating linked-clone VM from snapshot {{(pid=62508) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1430.078133] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7d9eff9e-8546-4dfc-84ff-54bc99a42f1e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.094222] env[62508]: DEBUG oslo_vmware.api [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for the task: (returnval){ [ 1430.094222] env[62508]: value = "task-1775634" [ 1430.094222] env[62508]: _type = "Task" [ 1430.094222] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.110813] env[62508]: DEBUG oslo_vmware.api [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775634, 'name': CloneVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.155136] env[62508]: DEBUG oslo_concurrency.lockutils [req-11ff5a31-4883-4a77-9b5e-5877616040d4 req-e9d4a2e7-f0a7-4b9f-9ed4-47b61b2969dc service nova] Releasing lock "refresh_cache-a10a4217-ae46-4f00-9ba1-cdf74f44ec7b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1430.232715] env[62508]: DEBUG nova.network.neutron [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Successfully updated port: 4efa45b1-9d69-4e50-980d-b8a62b229d03 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1430.358800] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1a70c7-2a36-4349-b01e-888c8e20a856 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.370025] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2735b6ce-eb3c-4318-8a53-a5356426b448 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.380513] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52fecb64-20a4-3293-c2b6-a87731ba0eea, 'name': SearchDatastore_Task, 'duration_secs': 0.009061} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.415115] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1430.415576] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a/7015b188-17ca-45ec-8fe8-f80ef0f9cb0a.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1430.416960] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.417149] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1430.417378] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-300ddcd4-e21c-4f1e-803d-fdc801880874 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.420785] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dbe17d3-140b-432e-92f0-0b4863af2497 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.423281] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4f63059-3f2c-49de-b5f1-4e73f83474b3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.432255] env[62508]: DEBUG nova.network.neutron [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Successfully created port: 2623e6a9-42b1-4f98-9d68-a5230cdc3d79 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1430.436632] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6cc7c8-4d30-40b4-aecf-a61de299b9d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.443597] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1430.443597] env[62508]: value = "task-1775635" [ 1430.443597] env[62508]: _type = "Task" [ 1430.443597] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.443906] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1430.444130] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1430.445587] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93294e77-fdda-43d9-a9c4-efa36822c81a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.460275] env[62508]: DEBUG nova.compute.provider_tree [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1430.466689] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1430.466689] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a0d51b-765a-fc4a-bd15-ed011a2e15b0" [ 1430.466689] env[62508]: _type = "Task" [ 1430.466689] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.472501] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775635, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.482391] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a0d51b-765a-fc4a-bd15-ed011a2e15b0, 'name': SearchDatastore_Task, 'duration_secs': 0.009427} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.486043] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1775633, 'name': ReconfigVM_Task, 'duration_secs': 0.981224} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.486255] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15c21bf3-d895-4919-8063-c0aad915bad9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.488863] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Reconfigured VM instance instance-00000022 to attach disk [datastore1] e478855d-e9c7-4abc-8e22-a4b2eb0c7310/e478855d-e9c7-4abc-8e22-a4b2eb0c7310.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1430.489483] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ca59fdd-4873-41fd-a4a0-6f4cd3f2f4e4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.494367] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1430.494367] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52bed0a5-e76c-b254-b233-907db924b0aa" [ 1430.494367] env[62508]: _type = "Task" [ 1430.494367] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.499671] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1430.499671] env[62508]: value = "task-1775636" [ 1430.499671] env[62508]: _type = "Task" [ 1430.499671] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.505785] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52bed0a5-e76c-b254-b233-907db924b0aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.510782] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1775636, 'name': Rename_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.605329] env[62508]: DEBUG oslo_vmware.api [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775634, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.736487] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "refresh_cache-db74146d-abc3-4d48-be1b-6ad471794dbf" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1430.736787] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquired lock "refresh_cache-db74146d-abc3-4d48-be1b-6ad471794dbf" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.736957] env[62508]: DEBUG nova.network.neutron [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1430.848619] env[62508]: DEBUG nova.compute.manager [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1430.887279] env[62508]: DEBUG nova.virt.hardware [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1430.887279] env[62508]: DEBUG nova.virt.hardware [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1430.887279] env[62508]: DEBUG nova.virt.hardware [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1430.887492] env[62508]: DEBUG nova.virt.hardware [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1430.887492] env[62508]: DEBUG nova.virt.hardware [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1430.887580] env[62508]: DEBUG nova.virt.hardware [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1430.887796] env[62508]: DEBUG nova.virt.hardware [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1430.887953] env[62508]: DEBUG nova.virt.hardware [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1430.889309] env[62508]: DEBUG nova.virt.hardware [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1430.889639] env[62508]: DEBUG nova.virt.hardware [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1430.889920] env[62508]: DEBUG nova.virt.hardware [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1430.891523] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa7b141-d62a-41c0-81a3-245e83e5be66 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.902185] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45b4770-deef-4178-9b45-2971de32b57c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.956913] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775635, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524241} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.957206] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a/7015b188-17ca-45ec-8fe8-f80ef0f9cb0a.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1430.957414] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1430.957673] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c7b35d2-18ec-40dc-b62e-eb187c0a5c56 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.965971] env[62508]: DEBUG nova.scheduler.client.report [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1430.970664] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1430.970664] env[62508]: value = "task-1775637" [ 1430.970664] env[62508]: _type = "Task" [ 1430.970664] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.984096] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775637, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.012052] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52bed0a5-e76c-b254-b233-907db924b0aa, 'name': SearchDatastore_Task, 'duration_secs': 0.009246} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.012052] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1431.012052] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a10a4217-ae46-4f00-9ba1-cdf74f44ec7b/a10a4217-ae46-4f00-9ba1-cdf74f44ec7b.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1431.012052] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62f0cd56-596a-4916-9f81-d2aaf1e5f496 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.017020] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1775636, 'name': Rename_Task, 'duration_secs': 0.150101} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.018081] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1431.018244] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ebbaa8f4-e147-4a06-96ed-59185c829d63 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.022377] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1431.022377] env[62508]: value = "task-1775638" [ 1431.022377] env[62508]: _type = "Task" [ 1431.022377] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.028530] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1431.028530] env[62508]: value = "task-1775639" [ 1431.028530] env[62508]: _type = "Task" [ 1431.028530] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.039859] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775638, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.047970] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1775639, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.108605] env[62508]: DEBUG oslo_vmware.api [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775634, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.284966] env[62508]: DEBUG nova.network.neutron [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1431.474115] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.643s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.474358] env[62508]: DEBUG nova.compute.manager [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1431.478100] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.113s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.479132] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.481942] env[62508]: DEBUG oslo_concurrency.lockutils [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.204s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.484081] env[62508]: INFO nova.compute.claims [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1431.499648] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775637, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081495} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.500037] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1431.500933] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598fee73-f9af-414c-9502-4c5a72c492cf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.506415] env[62508]: DEBUG nova.network.neutron [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Updating instance_info_cache with network_info: [{"id": "4efa45b1-9d69-4e50-980d-b8a62b229d03", "address": "fa:16:3e:f1:8a:32", "network": {"id": "ca54620c-2118-4248-ac67-90f8579e33aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-578420006-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15a9d6b8eb4e44a7a3d7fa4abe0cd5bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4efa45b1-9d", "ovs_interfaceid": "4efa45b1-9d69-4e50-980d-b8a62b229d03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1431.534618] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a/7015b188-17ca-45ec-8fe8-f80ef0f9cb0a.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1431.537326] env[62508]: INFO nova.scheduler.client.report [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Deleted allocations for instance a239d78f-085a-4e5c-924d-cf338298fa73 [ 1431.538590] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5721deb-2003-4fd5-99f8-6c4377e5621a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.575691] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 
tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775638, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.581959] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1431.581959] env[62508]: value = "task-1775640" [ 1431.581959] env[62508]: _type = "Task" [ 1431.581959] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.582163] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1775639, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.593233] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775640, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.609780] env[62508]: DEBUG oslo_vmware.api [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775634, 'name': CloneVM_Task} progress is 95%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.997439] env[62508]: DEBUG nova.compute.utils [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1432.011085] env[62508]: DEBUG nova.compute.manager [req-15ea6704-3219-4430-8c7c-15d3d514342f req-ede8da32-3a47-4c17-9243-71e6e31a0664 service nova] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Received event network-changed-4efa45b1-9d69-4e50-980d-b8a62b229d03 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1432.011322] env[62508]: DEBUG nova.compute.manager [req-15ea6704-3219-4430-8c7c-15d3d514342f req-ede8da32-3a47-4c17-9243-71e6e31a0664 service nova] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Refreshing instance network info cache due to event network-changed-4efa45b1-9d69-4e50-980d-b8a62b229d03. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1432.011936] env[62508]: DEBUG oslo_concurrency.lockutils [req-15ea6704-3219-4430-8c7c-15d3d514342f req-ede8da32-3a47-4c17-9243-71e6e31a0664 service nova] Acquiring lock "refresh_cache-db74146d-abc3-4d48-be1b-6ad471794dbf" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1432.012276] env[62508]: DEBUG nova.compute.manager [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1432.014818] env[62508]: DEBUG nova.network.neutron [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1432.015679] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Releasing lock "refresh_cache-db74146d-abc3-4d48-be1b-6ad471794dbf" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1432.015679] env[62508]: DEBUG nova.compute.manager [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Instance network_info: |[{"id": "4efa45b1-9d69-4e50-980d-b8a62b229d03", "address": "fa:16:3e:f1:8a:32", "network": {"id": "ca54620c-2118-4248-ac67-90f8579e33aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-578420006-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15a9d6b8eb4e44a7a3d7fa4abe0cd5bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4efa45b1-9d", "ovs_interfaceid": "4efa45b1-9d69-4e50-980d-b8a62b229d03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1432.016753] env[62508]: DEBUG oslo_concurrency.lockutils [req-15ea6704-3219-4430-8c7c-15d3d514342f req-ede8da32-3a47-4c17-9243-71e6e31a0664 service nova] Acquired lock "refresh_cache-db74146d-abc3-4d48-be1b-6ad471794dbf" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1432.016753] env[62508]: DEBUG nova.network.neutron [req-15ea6704-3219-4430-8c7c-15d3d514342f req-ede8da32-3a47-4c17-9243-71e6e31a0664 service nova] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Refreshing network info cache for port 4efa45b1-9d69-4e50-980d-b8a62b229d03 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1432.018570] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:8a:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9d6abf71-e893-4dec-9a05-0fe7d6c0624e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'4efa45b1-9d69-4e50-980d-b8a62b229d03', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1432.028688] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Creating folder: Project (15a9d6b8eb4e44a7a3d7fa4abe0cd5bb). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1432.031623] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a257d22-7aa8-45df-93bf-e119cd9c87f7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.048711] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Created folder: Project (15a9d6b8eb4e44a7a3d7fa4abe0cd5bb) in parent group-v368536. [ 1432.048911] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Creating folder: Instances. Parent ref: group-v368637. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1432.050207] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-587d462c-6549-4338-9766-d868736d6c3a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.062855] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775638, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.575483} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.063152] env[62508]: DEBUG oslo_vmware.api [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1775639, 'name': PowerOnVM_Task, 'duration_secs': 0.926023} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.064046] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a10a4217-ae46-4f00-9ba1-cdf74f44ec7b/a10a4217-ae46-4f00-9ba1-cdf74f44ec7b.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1432.064877] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1432.065293] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1432.065601] env[62508]: INFO nova.compute.manager [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Took 11.68 seconds to spawn the instance on the hypervisor. [ 1432.065895] env[62508]: DEBUG nova.compute.manager [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1432.066616] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64483261-920d-4b06-bca2-e3725b30dbf4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.069218] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d16f87-ba1c-4a94-8fc4-f3b26fb78774 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.076949] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Created folder: Instances in parent group-v368637. [ 1432.077522] env[62508]: DEBUG oslo.service.loopingcall [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1432.080684] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2be475a0-aded-4453-adb6-24d017fc40f0 tempest-ServerPasswordTestJSON-1117999681 tempest-ServerPasswordTestJSON-1117999681-project-member] Lock "a239d78f-085a-4e5c-924d-cf338298fa73" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.683s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1432.080684] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1432.081199] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94075230-1594-4cbd-893c-a76cc899efee {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.107611] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1432.107611] env[62508]: value = "task-1775643" [ 1432.107611] env[62508]: _type = "Task" [ 1432.107611] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.121735] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1432.121735] env[62508]: value = "task-1775644" [ 1432.121735] env[62508]: _type = "Task" [ 1432.121735] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.136166] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775643, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.136166] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775640, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.136166] env[62508]: DEBUG oslo_vmware.api [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775634, 'name': CloneVM_Task, 'duration_secs': 1.708867} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.136166] env[62508]: INFO nova.virt.vmwareapi.vmops [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Created linked-clone VM from snapshot [ 1432.136736] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e0f47f-3805-4f0c-b66a-f38e4440ee29 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.145199] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775644, 'name': CreateVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.149281] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Uploading image 48904c02-82a2-4237-a426-b37b65d56dfa {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1432.191968] env[62508]: DEBUG oslo_vmware.rw_handles [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1432.191968] env[62508]: value = "vm-368636" [ 1432.191968] env[62508]: _type = "VirtualMachine" [ 1432.191968] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1432.192686] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f2953bd6-e5d8-4fc2-93c2-266d54c6a2a3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.200799] env[62508]: DEBUG oslo_vmware.rw_handles [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Lease: (returnval){ [ 1432.200799] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f2e331-131f-f987-5046-f79b9820d022" [ 1432.200799] env[62508]: _type = "HttpNfcLease" [ 1432.200799] env[62508]: } obtained for exporting VM: (result){ [ 1432.200799] env[62508]: value = "vm-368636" [ 1432.200799] env[62508]: _type = "VirtualMachine" [ 1432.200799] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1432.201115] env[62508]: DEBUG oslo_vmware.api [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for the lease: (returnval){ [ 1432.201115] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f2e331-131f-f987-5046-f79b9820d022" [ 1432.201115] env[62508]: _type = "HttpNfcLease" [ 1432.201115] env[62508]: } to be ready. 
{{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1432.208373] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1432.208373] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f2e331-131f-f987-5046-f79b9820d022" [ 1432.208373] env[62508]: _type = "HttpNfcLease" [ 1432.208373] env[62508]: } is initializing. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1432.221345] env[62508]: DEBUG nova.policy [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c2566b097945462bbdcd9fc8be93d191', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '314bea56e49b40ada7d4b952d0a2f596', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1432.515872] env[62508]: DEBUG nova.compute.manager [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1432.620865] env[62508]: INFO nova.compute.manager [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Took 51.30 seconds to build instance. [ 1432.629756] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775640, 'name': ReconfigVM_Task, 'duration_secs': 0.983684} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.634787] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Reconfigured VM instance instance-00000023 to attach disk [datastore1] 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a/7015b188-17ca-45ec-8fe8-f80ef0f9cb0a.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1432.635933] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-024bb7b8-d58b-4ba4-a6aa-5a61d71bf809 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.642685] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775643, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.190339} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.643667] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1432.652287] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba99fda8-238d-4791-ba93-84a2892f2bd6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.655222] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775644, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.657916] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1432.657916] env[62508]: value = "task-1775646" [ 1432.657916] env[62508]: _type = "Task" [ 1432.657916] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.687244] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] a10a4217-ae46-4f00-9ba1-cdf74f44ec7b/a10a4217-ae46-4f00-9ba1-cdf74f44ec7b.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1432.693241] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efdb0d4f-d16c-4fa4-8378-1ac5d9a74ef7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.714462] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775646, 'name': Rename_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.721457] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1432.721457] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f2e331-131f-f987-5046-f79b9820d022" [ 1432.721457] env[62508]: _type = "HttpNfcLease" [ 1432.721457] env[62508]: } is ready. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1432.723142] env[62508]: DEBUG oslo_vmware.rw_handles [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1432.723142] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f2e331-131f-f987-5046-f79b9820d022" [ 1432.723142] env[62508]: _type = "HttpNfcLease" [ 1432.723142] env[62508]: }. 
{{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1432.723521] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1432.723521] env[62508]: value = "task-1775647" [ 1432.723521] env[62508]: _type = "Task" [ 1432.723521] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.726664] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d671895c-4920-45f6-b855-f3444280645e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.739570] env[62508]: DEBUG oslo_vmware.rw_handles [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528cdf8b-20b8-2f71-a41d-4c5469891f05/disk-0.vmdk from lease info. {{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1432.739570] env[62508]: DEBUG oslo_vmware.rw_handles [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528cdf8b-20b8-2f71-a41d-4c5469891f05/disk-0.vmdk for reading. {{(pid=62508) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1432.744107] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775647, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.913149] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-efa5aa15-754d-49c3-8943-5e15d30f694c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.917526] env[62508]: DEBUG nova.network.neutron [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Successfully updated port: 2623e6a9-42b1-4f98-9d68-a5230cdc3d79 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1433.091164] env[62508]: DEBUG nova.network.neutron [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Successfully created port: 49925acf-7ad5-4349-bc32-67f5ba20e54d {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1433.127435] env[62508]: DEBUG oslo_concurrency.lockutils [None req-73765367-ea2e-4fb3-a510-898f6ffddee5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.335s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.154043] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775644, 'name': CreateVM_Task, 'duration_secs': 0.740247} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.154043] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1433.154043] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1433.154043] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.154043] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1433.154043] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee005eb6-77ac-4604-90b0-9566380159e4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.163968] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1433.163968] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e76dd2-e4d3-1e2c-57eb-4f7e2560f4b7" [ 1433.163968] env[62508]: _type = "Task" [ 1433.163968] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.183926] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775646, 'name': Rename_Task, 'duration_secs': 0.182938} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.188082] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1433.188886] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e76dd2-e4d3-1e2c-57eb-4f7e2560f4b7, 'name': SearchDatastore_Task, 'duration_secs': 0.011592} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.189142] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7498304c-a427-4fb9-816e-3d34bb260f72 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.191106] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1433.191385] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1433.191681] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1433.191905] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.192160] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1433.203743] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a2beee2-bc2d-4abb-96fc-67128b7780e1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.217969] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1433.217969] env[62508]: value = "task-1775648" [ 1433.217969] env[62508]: _type = "Task" [ 1433.217969] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.221055] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-221cf1a0-0f92-45e3-8fd4-1042284e7b3a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.224431] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1433.224573] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1433.228877] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a476a3a-5977-44e5-a0af-f14cf378144d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.245697] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775648, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.249032] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0bc17a1-d5e7-4523-84c5-7c5832e418cf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.253247] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1433.253247] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5287ce16-0ee3-a84d-583f-71c3ae82b4ad" [ 1433.253247] env[62508]: _type = "Task" [ 1433.253247] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.258631] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775647, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.292546] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eced99cd-af29-4e79-b3cd-942a7924832f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.299966] env[62508]: DEBUG nova.network.neutron [req-15ea6704-3219-4430-8c7c-15d3d514342f req-ede8da32-3a47-4c17-9243-71e6e31a0664 service nova] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Updated VIF entry in instance network info cache for port 4efa45b1-9d69-4e50-980d-b8a62b229d03. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1433.300572] env[62508]: DEBUG nova.network.neutron [req-15ea6704-3219-4430-8c7c-15d3d514342f req-ede8da32-3a47-4c17-9243-71e6e31a0664 service nova] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Updating instance_info_cache with network_info: [{"id": "4efa45b1-9d69-4e50-980d-b8a62b229d03", "address": "fa:16:3e:f1:8a:32", "network": {"id": "ca54620c-2118-4248-ac67-90f8579e33aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-578420006-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15a9d6b8eb4e44a7a3d7fa4abe0cd5bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4efa45b1-9d", "ovs_interfaceid": "4efa45b1-9d69-4e50-980d-b8a62b229d03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1433.302033] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5287ce16-0ee3-a84d-583f-71c3ae82b4ad, 'name': SearchDatastore_Task, 'duration_secs': 0.018283} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.303407] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b14b292-5654-4c23-8e14-50b8811efb62 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.314463] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50a8b5b-5355-4ea1-9af6-1091b5c92c06 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.320382] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1433.320382] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5205d33a-4774-e7f1-1ffd-fe173519f9f2" [ 1433.320382] env[62508]: _type = "Task" [ 1433.320382] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.337318] env[62508]: DEBUG nova.compute.provider_tree [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1433.342458] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5205d33a-4774-e7f1-1ffd-fe173519f9f2, 'name': SearchDatastore_Task, 'duration_secs': 0.011553} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.343188] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1433.344037] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] db74146d-abc3-4d48-be1b-6ad471794dbf/db74146d-abc3-4d48-be1b-6ad471794dbf.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1433.344037] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-46bfa03e-eed3-4ada-a81c-c94e317cc4fa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.352817] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1433.352817] env[62508]: value = "task-1775649" [ 1433.352817] env[62508]: _type = "Task" [ 1433.352817] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.362502] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775649, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.420617] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "refresh_cache-70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1433.424143] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "refresh_cache-70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.424143] env[62508]: DEBUG nova.network.neutron [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1433.529174] env[62508]: DEBUG nova.compute.manager [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1433.568779] env[62508]: DEBUG nova.virt.hardware [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1433.569145] env[62508]: DEBUG nova.virt.hardware [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1433.569382] env[62508]: DEBUG nova.virt.hardware [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1433.569653] env[62508]: DEBUG nova.virt.hardware [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 
tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1433.569890] env[62508]: DEBUG nova.virt.hardware [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1433.570075] env[62508]: DEBUG nova.virt.hardware [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1433.570350] env[62508]: DEBUG nova.virt.hardware [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1433.570510] env[62508]: DEBUG nova.virt.hardware [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1433.570793] env[62508]: DEBUG nova.virt.hardware [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1433.571016] env[62508]: DEBUG nova.virt.hardware [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1433.571305] env[62508]: DEBUG nova.virt.hardware [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1433.572682] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f310812-978e-43e4-bfc6-ec5c66878818 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.582394] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850d3fc5-448a-4679-b5bb-69768ec72465 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.632530] env[62508]: DEBUG nova.compute.manager [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1433.733284] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775648, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.744850] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775647, 'name': ReconfigVM_Task, 'duration_secs': 0.5914} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.745482] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Reconfigured VM instance instance-00000024 to attach disk [datastore1] a10a4217-ae46-4f00-9ba1-cdf74f44ec7b/a10a4217-ae46-4f00-9ba1-cdf74f44ec7b.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1433.745820] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-585e15c5-7467-481a-9d30-da8feb567a73 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.754508] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1433.754508] env[62508]: value = "task-1775650" [ 1433.754508] env[62508]: _type = "Task" [ 1433.754508] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.765254] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775650, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.810839] env[62508]: DEBUG oslo_concurrency.lockutils [req-15ea6704-3219-4430-8c7c-15d3d514342f req-ede8da32-3a47-4c17-9243-71e6e31a0664 service nova] Releasing lock "refresh_cache-db74146d-abc3-4d48-be1b-6ad471794dbf" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1433.862780] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775649, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.882145] env[62508]: ERROR nova.scheduler.client.report [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [req-60323f4a-5da1-4453-ae5c-f848d7cd018a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-60323f4a-5da1-4453-ae5c-f848d7cd018a"}]} [ 1433.904939] env[62508]: DEBUG nova.scheduler.client.report [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1433.920036] env[62508]: DEBUG nova.scheduler.client.report [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1433.920036] env[62508]: DEBUG nova.compute.provider_tree [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1433.934998] env[62508]: DEBUG nova.scheduler.client.report [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1433.956736] env[62508]: DEBUG nova.scheduler.client.report [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 
tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1433.992085] env[62508]: DEBUG nova.network.neutron [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1434.165919] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquiring lock "2b166aa9-9381-42c0-a607-7d610f08a4e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1434.166182] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Lock "2b166aa9-9381-42c0-a607-7d610f08a4e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.178907] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1434.237934] env[62508]: DEBUG oslo_vmware.api [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775648, 'name': PowerOnVM_Task, 'duration_secs': 0.798318} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.239222] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1434.243189] env[62508]: INFO nova.compute.manager [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Took 11.30 seconds to spawn the instance on the hypervisor. 
[ 1434.243189] env[62508]: DEBUG nova.compute.manager [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1434.244105] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea51accf-bcf9-45ea-9de8-e77907128801 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.270736] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775650, 'name': Rename_Task, 'duration_secs': 0.215857} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.270962] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1434.271213] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e213ce9-de36-40d3-9cf9-07ee97d5e143 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.277893] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1434.277893] env[62508]: value = "task-1775651" [ 1434.277893] env[62508]: _type = "Task" [ 1434.277893] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.298548] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775651, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.371610] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775649, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526305} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.372040] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] db74146d-abc3-4d48-be1b-6ad471794dbf/db74146d-abc3-4d48-be1b-6ad471794dbf.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1434.372368] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1434.372738] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c7475721-3202-4bd8-ac18-3a7cb30a566b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.384476] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1434.384476] env[62508]: value = "task-1775652" [ 1434.384476] env[62508]: _type = "Task" [ 1434.384476] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.396640] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775652, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.401950] env[62508]: DEBUG nova.network.neutron [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Updating instance_info_cache with network_info: [{"id": "2623e6a9-42b1-4f98-9d68-a5230cdc3d79", "address": "fa:16:3e:9d:f1:1e", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2623e6a9-42", "ovs_interfaceid": "2623e6a9-42b1-4f98-9d68-a5230cdc3d79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.420382] env[62508]: DEBUG nova.compute.manager [req-fe1c09f2-cd14-4978-ae99-186ff94f39ee req-4397bfcb-8920-4b9f-8299-cdf97a388f3e service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Received event network-vif-plugged-2623e6a9-42b1-4f98-9d68-a5230cdc3d79 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1434.420382] env[62508]: DEBUG oslo_concurrency.lockutils [req-fe1c09f2-cd14-4978-ae99-186ff94f39ee req-4397bfcb-8920-4b9f-8299-cdf97a388f3e service nova] Acquiring lock "70c8de27-4696-4005-bbec-e7a33e56311b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1434.420665] env[62508]: DEBUG oslo_concurrency.lockutils [req-fe1c09f2-cd14-4978-ae99-186ff94f39ee req-4397bfcb-8920-4b9f-8299-cdf97a388f3e service nova] Lock "70c8de27-4696-4005-bbec-e7a33e56311b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.420800] env[62508]: DEBUG oslo_concurrency.lockutils [req-fe1c09f2-cd14-4978-ae99-186ff94f39ee req-4397bfcb-8920-4b9f-8299-cdf97a388f3e service nova] Lock "70c8de27-4696-4005-bbec-e7a33e56311b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.424266] env[62508]: DEBUG nova.compute.manager [req-fe1c09f2-cd14-4978-ae99-186ff94f39ee req-4397bfcb-8920-4b9f-8299-cdf97a388f3e service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] No waiting events found dispatching network-vif-plugged-2623e6a9-42b1-4f98-9d68-a5230cdc3d79 
{{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1434.424266] env[62508]: WARNING nova.compute.manager [req-fe1c09f2-cd14-4978-ae99-186ff94f39ee req-4397bfcb-8920-4b9f-8299-cdf97a388f3e service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Received unexpected event network-vif-plugged-2623e6a9-42b1-4f98-9d68-a5230cdc3d79 for instance with vm_state building and task_state spawning. [ 1434.424266] env[62508]: DEBUG nova.compute.manager [req-fe1c09f2-cd14-4978-ae99-186ff94f39ee req-4397bfcb-8920-4b9f-8299-cdf97a388f3e service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Received event network-changed-2623e6a9-42b1-4f98-9d68-a5230cdc3d79 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1434.424266] env[62508]: DEBUG nova.compute.manager [req-fe1c09f2-cd14-4978-ae99-186ff94f39ee req-4397bfcb-8920-4b9f-8299-cdf97a388f3e service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Refreshing instance network info cache due to event network-changed-2623e6a9-42b1-4f98-9d68-a5230cdc3d79. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1434.424266] env[62508]: DEBUG oslo_concurrency.lockutils [req-fe1c09f2-cd14-4978-ae99-186ff94f39ee req-4397bfcb-8920-4b9f-8299-cdf97a388f3e service nova] Acquiring lock "refresh_cache-70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1434.524881] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235daeb4-ddcc-453b-816d-6d7f33ee5f7f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.533138] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa0fac00-e199-414e-b652-43eefe446868 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.565301] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a006c27-21af-43bd-8976-93aed2fca484 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.574357] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aefd4034-fc57-46d3-aa37-4637a9ed1e59 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.589741] env[62508]: DEBUG nova.compute.provider_tree [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1434.770488] env[62508]: INFO nova.compute.manager [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] 
[instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Took 47.91 seconds to build instance. [ 1434.790183] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775651, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.895348] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775652, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.12801} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.895703] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1434.896720] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c7f5d6-f2e8-4a24-a5f0-4352a8393b7e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.904656] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "refresh_cache-70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1434.904988] env[62508]: DEBUG nova.compute.manager [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Instance network_info: |[{"id": "2623e6a9-42b1-4f98-9d68-a5230cdc3d79", "address": "fa:16:3e:9d:f1:1e", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2623e6a9-42", "ovs_interfaceid": "2623e6a9-42b1-4f98-9d68-a5230cdc3d79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1434.905308] env[62508]: DEBUG oslo_concurrency.lockutils [req-fe1c09f2-cd14-4978-ae99-186ff94f39ee req-4397bfcb-8920-4b9f-8299-cdf97a388f3e 
service nova] Acquired lock "refresh_cache-70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1434.905498] env[62508]: DEBUG nova.network.neutron [req-fe1c09f2-cd14-4978-ae99-186ff94f39ee req-4397bfcb-8920-4b9f-8299-cdf97a388f3e service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Refreshing network info cache for port 2623e6a9-42b1-4f98-9d68-a5230cdc3d79 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1434.909848] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:f1:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dba18786-598d-4e06-96db-b3dc1717530f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2623e6a9-42b1-4f98-9d68-a5230cdc3d79', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1434.925483] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Creating folder: Project (b46df14344794f29a8b0c00408d18159). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1434.937887] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-996bb182-5e91-43ee-b45f-c3119baa1fa4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.949432] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] db74146d-abc3-4d48-be1b-6ad471794dbf/db74146d-abc3-4d48-be1b-6ad471794dbf.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1434.950465] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3248dfad-8e31-400f-b27a-3a2a9be7f683 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.971481] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1434.971481] env[62508]: value = "task-1775654" [ 1434.971481] env[62508]: _type = "Task" [ 1434.971481] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.976600] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Created folder: Project (b46df14344794f29a8b0c00408d18159) in parent group-v368536. 
[ 1434.976746] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Creating folder: Instances. Parent ref: group-v368640. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1434.977382] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d0ed133-2353-44ab-9699-976fd5cf964c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.983347] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775654, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.986677] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Created folder: Instances in parent group-v368640. [ 1434.986986] env[62508]: DEBUG oslo.service.loopingcall [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1434.987123] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1434.988566] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-39ece655-89b3-4a67-9f67-6ad34b0f82bd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.009689] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1435.009689] env[62508]: value = "task-1775656" [ 1435.009689] env[62508]: _type = "Task" [ 1435.009689] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.018989] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775656, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.142599] env[62508]: DEBUG nova.scheduler.client.report [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 64 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1435.142893] env[62508]: DEBUG nova.compute.provider_tree [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 64 to 65 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1435.143127] env[62508]: DEBUG nova.compute.provider_tree [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1435.275200] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8c2ae163-ad1a-4727-a931-d0ad6c5b22c3 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.017s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1435.295724] env[62508]: DEBUG oslo_vmware.api [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775651, 'name': PowerOnVM_Task, 'duration_secs': 0.966194} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.296374] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1435.296850] env[62508]: INFO nova.compute.manager [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Took 9.67 seconds to spawn the instance on the hypervisor. [ 1435.297250] env[62508]: DEBUG nova.compute.manager [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1435.299247] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1602b21-0d0c-4065-abd0-e6963fa0dbef {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.483069] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775654, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.524092] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775656, 'name': CreateVM_Task, 'duration_secs': 0.455677} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.524460] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1435.525620] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1435.525991] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1435.526439] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1435.526861] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1347dde6-75c5-43b7-864e-5b7556f46cc1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.532211] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1435.532211] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522b7d20-9a14-c640-66cc-36630de083e4" [ 1435.532211] env[62508]: _type = "Task" [ 1435.532211] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.542307] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522b7d20-9a14-c640-66cc-36630de083e4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.649754] env[62508]: DEBUG oslo_concurrency.lockutils [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.167s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1435.649839] env[62508]: DEBUG nova.compute.manager [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1435.656030] env[62508]: DEBUG oslo_concurrency.lockutils [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.093s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.656030] env[62508]: DEBUG nova.objects.instance [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lazy-loading 'resources' on Instance uuid ffe54977-81c4-4842-9773-eed704a53ada {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1435.754999] env[62508]: DEBUG nova.network.neutron [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Successfully updated port: 49925acf-7ad5-4349-bc32-67f5ba20e54d {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1435.779020] env[62508]: DEBUG nova.compute.manager [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1435.831242] env[62508]: INFO nova.compute.manager [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Took 45.51 seconds to build instance. [ 1435.901290] env[62508]: DEBUG nova.network.neutron [req-fe1c09f2-cd14-4978-ae99-186ff94f39ee req-4397bfcb-8920-4b9f-8299-cdf97a388f3e service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Updated VIF entry in instance network info cache for port 2623e6a9-42b1-4f98-9d68-a5230cdc3d79. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1435.901819] env[62508]: DEBUG nova.network.neutron [req-fe1c09f2-cd14-4978-ae99-186ff94f39ee req-4397bfcb-8920-4b9f-8299-cdf97a388f3e service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Updating instance_info_cache with network_info: [{"id": "2623e6a9-42b1-4f98-9d68-a5230cdc3d79", "address": "fa:16:3e:9d:f1:1e", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2623e6a9-42", "ovs_interfaceid": "2623e6a9-42b1-4f98-9d68-a5230cdc3d79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1435.983584] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775654, 'name': ReconfigVM_Task, 'duration_secs': 0.558864} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.983890] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Reconfigured VM instance instance-00000025 to attach disk [datastore1] db74146d-abc3-4d48-be1b-6ad471794dbf/db74146d-abc3-4d48-be1b-6ad471794dbf.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1435.984584] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1824c230-fe73-45cd-9d0f-cc1fc4e4e5cd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.991381] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1435.991381] env[62508]: value = "task-1775657" [ 1435.991381] env[62508]: _type = "Task" [ 1435.991381] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.999895] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775657, 'name': Rename_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.044881] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522b7d20-9a14-c640-66cc-36630de083e4, 'name': SearchDatastore_Task, 'duration_secs': 0.009982} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.048465] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1436.048610] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1436.048875] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.049036] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1436.049227] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1436.049519] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a96cd79-3916-4ba5-8270-2f6f40e29f79 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.063230] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1436.063230] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1436.063230] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2c7c7f5-9b77-4c05-b049-b4e02d361ec4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.070022] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1436.070022] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528ee744-a233-5eaf-75b4-abb3abcb110e" [ 1436.070022] env[62508]: _type = "Task" [ 1436.070022] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.078604] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528ee744-a233-5eaf-75b4-abb3abcb110e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.157650] env[62508]: DEBUG nova.compute.utils [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1436.167028] env[62508]: DEBUG nova.compute.manager [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1436.167028] env[62508]: DEBUG nova.network.neutron [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1436.256731] env[62508]: DEBUG nova.policy [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f3c96cc4a58a4321837c1ab8badc686a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0156fba01363470eaa9771d5f296f730', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1436.258817] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Acquiring lock "refresh_cache-2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.259063] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Acquired lock "refresh_cache-2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1436.259135] env[62508]: DEBUG nova.network.neutron [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1436.304422] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1436.336485] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6a1866ee-6708-4ddb-863e-2a9d4126b359 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "a10a4217-ae46-4f00-9ba1-cdf74f44ec7b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.288s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.404520] env[62508]: DEBUG oslo_concurrency.lockutils [req-fe1c09f2-cd14-4978-ae99-186ff94f39ee req-4397bfcb-8920-4b9f-8299-cdf97a388f3e service nova] Releasing lock "refresh_cache-70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1436.504204] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775657, 'name': Rename_Task, 'duration_secs': 0.207331} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.504564] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1436.507936] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e62b329d-f5ca-49af-8bab-bb8a95efb2cc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.516420] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1436.516420] env[62508]: value = "task-1775658" [ 1436.516420] env[62508]: _type = "Task" [ 1436.516420] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.528880] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775658, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.536151] env[62508]: DEBUG nova.compute.manager [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Received event network-vif-plugged-49925acf-7ad5-4349-bc32-67f5ba20e54d {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1436.536404] env[62508]: DEBUG oslo_concurrency.lockutils [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] Acquiring lock "2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1436.537016] env[62508]: DEBUG oslo_concurrency.lockutils [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] Lock "2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1436.537016] env[62508]: DEBUG oslo_concurrency.lockutils [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] Lock "2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.539592] env[62508]: DEBUG nova.compute.manager [req-5fc2b741-3681-4e64-99ee-aed35114233c 
req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] No waiting events found dispatching network-vif-plugged-49925acf-7ad5-4349-bc32-67f5ba20e54d {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1436.539913] env[62508]: WARNING nova.compute.manager [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Received unexpected event network-vif-plugged-49925acf-7ad5-4349-bc32-67f5ba20e54d for instance with vm_state building and task_state spawning. [ 1436.542364] env[62508]: DEBUG nova.compute.manager [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Received event network-changed-49925acf-7ad5-4349-bc32-67f5ba20e54d {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1436.542364] env[62508]: DEBUG nova.compute.manager [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Refreshing instance network info cache due to event network-changed-49925acf-7ad5-4349-bc32-67f5ba20e54d. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1436.542364] env[62508]: DEBUG oslo_concurrency.lockutils [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] Acquiring lock "refresh_cache-2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.595349] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528ee744-a233-5eaf-75b4-abb3abcb110e, 'name': SearchDatastore_Task, 'duration_secs': 0.014602} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.595349] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d56baa57-e091-4f29-b56a-6cdeb84e3cef {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.602169] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1436.602169] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52717bed-0d81-b700-e639-a038e64975fe" [ 1436.602169] env[62508]: _type = "Task" [ 1436.602169] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.617072] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52717bed-0d81-b700-e639-a038e64975fe, 'name': SearchDatastore_Task, 'duration_secs': 0.010624} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.617534] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1436.617720] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 70c8de27-4696-4005-bbec-e7a33e56311b/70c8de27-4696-4005-bbec-e7a33e56311b.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1436.617992] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2b969e76-21a7-4c00-9746-13750248cf2f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.625260] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1436.625260] env[62508]: value = "task-1775659" [ 1436.625260] env[62508]: _type = "Task" [ 1436.625260] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.643800] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1775659, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.667211] env[62508]: DEBUG nova.compute.manager [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1436.716366] env[62508]: DEBUG nova.network.neutron [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Successfully created port: bf83eb47-d009-45ec-9583-6e3d46a6f0f8 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1436.770036] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bbeb20b-eca8-42f4-afdc-6788687a5818 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.776821] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73e3380-a1c4-458b-8634-0dbaafe54580 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.809585] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5be68c3-ae61-416c-90d9-629bbae19a97 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.818532] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5a2fb0-0468-4b13-b254-09e271ee82a7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.833353] env[62508]: DEBUG nova.compute.provider_tree [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1436.840671] env[62508]: DEBUG nova.compute.manager [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1436.855659] env[62508]: DEBUG nova.network.neutron [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1437.028353] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775658, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.140086] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1775659, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.227987] env[62508]: DEBUG nova.network.neutron [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Updating instance_info_cache with network_info: [{"id": "49925acf-7ad5-4349-bc32-67f5ba20e54d", "address": "fa:16:3e:53:ab:9e", "network": {"id": "545d35e2-d7d9-40d3-b74c-406b655947a0", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-781281175-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "314bea56e49b40ada7d4b952d0a2f596", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49925acf-7a", "ovs_interfaceid": "49925acf-7ad5-4349-bc32-67f5ba20e54d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1437.337278] env[62508]: DEBUG nova.scheduler.client.report [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1437.358948] env[62508]: INFO nova.compute.manager [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Rescuing [ 1437.359251] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "refresh_cache-a10a4217-ae46-4f00-9ba1-cdf74f44ec7b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1437.359432] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquired lock "refresh_cache-a10a4217-ae46-4f00-9ba1-cdf74f44ec7b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1437.359615] env[62508]: DEBUG 
nova.network.neutron [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1437.368859] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1437.527766] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775658, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.637319] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1775659, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.545403} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.637631] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 70c8de27-4696-4005-bbec-e7a33e56311b/70c8de27-4696-4005-bbec-e7a33e56311b.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1437.638220] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1437.638569] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-59118524-f9c6-48f0-af5c-5335cfa191ce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.646573] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1437.646573] env[62508]: value = "task-1775664" [ 1437.646573] env[62508]: _type = "Task" [ 1437.646573] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.657417] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1775664, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.679483] env[62508]: DEBUG nova.compute.manager [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1437.709020] env[62508]: DEBUG nova.virt.hardware [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1437.709020] env[62508]: DEBUG nova.virt.hardware [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1437.709020] env[62508]: DEBUG nova.virt.hardware [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1437.709020] env[62508]: DEBUG nova.virt.hardware [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1437.709020] env[62508]: DEBUG nova.virt.hardware [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1437.709020] env[62508]: DEBUG nova.virt.hardware [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1437.709369] env[62508]: DEBUG nova.virt.hardware [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1437.709369] env[62508]: DEBUG nova.virt.hardware [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1437.709531] env[62508]: DEBUG nova.virt.hardware [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1437.709757] env[62508]: DEBUG nova.virt.hardware [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1437.709956] env[62508]: DEBUG nova.virt.hardware [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1437.710962] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-921a593d-974a-4f1f-9d36-e9f7377dc64d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.721200] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b8e782-5701-475f-90f5-bd456f1da7b6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.739494] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Releasing lock "refresh_cache-2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1437.739859] env[62508]: DEBUG nova.compute.manager [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Instance network_info: |[{"id": "49925acf-7ad5-4349-bc32-67f5ba20e54d", "address": "fa:16:3e:53:ab:9e", "network": {"id": "545d35e2-d7d9-40d3-b74c-406b655947a0", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-781281175-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "314bea56e49b40ada7d4b952d0a2f596", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", 
"segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49925acf-7a", "ovs_interfaceid": "49925acf-7ad5-4349-bc32-67f5ba20e54d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1437.740427] env[62508]: DEBUG oslo_concurrency.lockutils [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] Acquired lock "refresh_cache-2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1437.740634] env[62508]: DEBUG nova.network.neutron [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Refreshing network info cache for port 49925acf-7ad5-4349-bc32-67f5ba20e54d {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1437.741955] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:ab:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39ab9baf-90cd-4fe2-8d56-434f8210fc19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '49925acf-7ad5-4349-bc32-67f5ba20e54d', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1437.753986] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Creating folder: Project (314bea56e49b40ada7d4b952d0a2f596). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1437.755868] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b186c3b-c87d-403f-b671-cea4bbf10e1e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.767318] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Created folder: Project (314bea56e49b40ada7d4b952d0a2f596) in parent group-v368536. [ 1437.767592] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Creating folder: Instances. Parent ref: group-v368646. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1437.767906] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58f0e3cb-a021-478a-8d03-18b70274a083 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.779040] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Created folder: Instances in parent group-v368646. [ 1437.779040] env[62508]: DEBUG oslo.service.loopingcall [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1437.779040] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1437.779040] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cf12467d-7a5b-4f70-86d6-31b4140008e2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.802343] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1437.802343] env[62508]: value = "task-1775667" [ 1437.802343] env[62508]: _type = "Task" [ 1437.802343] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.810532] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775667, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.843390] env[62508]: DEBUG oslo_concurrency.lockutils [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.190s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.845926] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.512s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.846296] env[62508]: DEBUG nova.objects.instance [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Lazy-loading 'resources' on Instance uuid 18ee140a-97bd-439a-8027-0dd0a1f0a6e2 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1437.866372] env[62508]: INFO nova.scheduler.client.report [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Deleted allocations for instance ffe54977-81c4-4842-9773-eed704a53ada [ 1438.029882] env[62508]: DEBUG oslo_vmware.api [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775658, 'name': PowerOnVM_Task, 'duration_secs': 1.434907} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.032610] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1438.032911] env[62508]: INFO nova.compute.manager [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Took 9.78 seconds to spawn the instance on the hypervisor. 
[ 1438.033125] env[62508]: DEBUG nova.compute.manager [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1438.033975] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb380ae-49c3-4cef-b2fb-a9c5194602a7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.121341] env[62508]: DEBUG nova.network.neutron [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Updating instance_info_cache with network_info: [{"id": "f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63", "address": "fa:16:3e:f7:62:93", "network": {"id": "1469693f-972e-4bc4-8302-f159fa7e79b8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1766475744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27e6f55b56be40d2a619f0119aefb2ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8a2c0d6-ef", "ovs_interfaceid": "f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1438.156338] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1775664, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07879} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.156535] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1438.157322] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-778e904d-9ea2-4691-93c7-66aecd0ab781 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.182171] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 70c8de27-4696-4005-bbec-e7a33e56311b/70c8de27-4696-4005-bbec-e7a33e56311b.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1438.182520] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c9d1774-0bec-42b6-aa26-169fddfc38b8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.202257] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1438.202257] env[62508]: value = "task-1775668" [ 1438.202257] env[62508]: _type = "Task" [ 1438.202257] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.210720] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1775668, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.312457] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775667, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.375766] env[62508]: DEBUG oslo_concurrency.lockutils [None req-517e345d-d4cc-4711-9f78-5bcc3fb6fec6 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lock "ffe54977-81c4-4842-9773-eed704a53ada" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.288s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1438.554353] env[62508]: INFO nova.compute.manager [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Took 43.23 seconds to build instance. 
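The recurring 'Acquiring lock ... / acquired ... :: waited Ns / released ... :: held Ns' records (for example around "compute_resources" and the "refresh_cache-<uuid>" locks) come from oslo.concurrency's lockutils helpers, which log the wait and hold times seen here. A small sketch of that usage with placeholder lock names and empty critical sections, not Nova's resource-tracker logic:

from oslo_concurrency import lockutils

# Decorator form: the wrapped function runs with the named lock held.
synchronized = lockutils.synchronized_with_prefix('nova-')


@synchronized('compute_resources')
def update_usage():
    pass  # critical section (placeholder)


# Context-manager form, comparable to the per-instance cache locks above.
with lockutils.lock('refresh_cache-example-uuid', lock_file_prefix='nova-'):
    pass  # critical section (placeholder)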
[ 1438.578749] env[62508]: DEBUG nova.network.neutron [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Updated VIF entry in instance network info cache for port 49925acf-7ad5-4349-bc32-67f5ba20e54d. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1438.579136] env[62508]: DEBUG nova.network.neutron [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Updating instance_info_cache with network_info: [{"id": "49925acf-7ad5-4349-bc32-67f5ba20e54d", "address": "fa:16:3e:53:ab:9e", "network": {"id": "545d35e2-d7d9-40d3-b74c-406b655947a0", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-781281175-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "314bea56e49b40ada7d4b952d0a2f596", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49925acf-7a", "ovs_interfaceid": "49925acf-7ad5-4349-bc32-67f5ba20e54d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1438.627404] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Releasing lock "refresh_cache-a10a4217-ae46-4f00-9ba1-cdf74f44ec7b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1438.718846] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1775668, 'name': ReconfigVM_Task, 'duration_secs': 0.318484} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.722556] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 70c8de27-4696-4005-bbec-e7a33e56311b/70c8de27-4696-4005-bbec-e7a33e56311b.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1438.723356] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0b25984-4fea-49ab-af0b-baec70d0bc9e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.729570] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1438.729570] env[62508]: value = "task-1775669" [ 1438.729570] env[62508]: _type = "Task" [ 1438.729570] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.741119] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1775669, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.758413] env[62508]: DEBUG nova.compute.manager [req-8faa2c7e-114d-4f64-85c1-f4de28717f52 req-58a4ad4b-00a3-4946-93ed-3ef263dd995d service nova] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Received event network-vif-plugged-bf83eb47-d009-45ec-9583-6e3d46a6f0f8 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1438.758687] env[62508]: DEBUG oslo_concurrency.lockutils [req-8faa2c7e-114d-4f64-85c1-f4de28717f52 req-58a4ad4b-00a3-4946-93ed-3ef263dd995d service nova] Acquiring lock "38d294a9-2f51-438d-b942-a88e380a981f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.758905] env[62508]: DEBUG oslo_concurrency.lockutils [req-8faa2c7e-114d-4f64-85c1-f4de28717f52 req-58a4ad4b-00a3-4946-93ed-3ef263dd995d service nova] Lock "38d294a9-2f51-438d-b942-a88e380a981f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1438.759133] env[62508]: DEBUG oslo_concurrency.lockutils [req-8faa2c7e-114d-4f64-85c1-f4de28717f52 req-58a4ad4b-00a3-4946-93ed-3ef263dd995d service nova] Lock "38d294a9-2f51-438d-b942-a88e380a981f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1438.759318] env[62508]: DEBUG nova.compute.manager [req-8faa2c7e-114d-4f64-85c1-f4de28717f52 req-58a4ad4b-00a3-4946-93ed-3ef263dd995d service nova] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] No waiting events found dispatching 
network-vif-plugged-bf83eb47-d009-45ec-9583-6e3d46a6f0f8 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1438.759483] env[62508]: WARNING nova.compute.manager [req-8faa2c7e-114d-4f64-85c1-f4de28717f52 req-58a4ad4b-00a3-4946-93ed-3ef263dd995d service nova] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Received unexpected event network-vif-plugged-bf83eb47-d009-45ec-9583-6e3d46a6f0f8 for instance with vm_state building and task_state spawning. [ 1438.812167] env[62508]: DEBUG nova.network.neutron [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Successfully updated port: bf83eb47-d009-45ec-9583-6e3d46a6f0f8 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1438.819676] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775667, 'name': CreateVM_Task, 'duration_secs': 0.547302} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.820182] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1438.821129] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1438.821129] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1438.821260] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1438.821448] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15285a5f-b4e3-46f8-8970-c8b3d9828801 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.828238] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Waiting for the task: (returnval){ [ 1438.828238] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a55811-603c-0d0f-1bc1-71d828464189" [ 1438.828238] env[62508]: _type = "Task" [ 1438.828238] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.839571] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a55811-603c-0d0f-1bc1-71d828464189, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.858319] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd979b5-926a-43f4-ae5a-570dc685410a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.867536] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c4b752-76d9-4309-aaf5-c4be970e43c5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.910088] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff5ff8de-6ad4-4f3a-a652-276bdaeb459f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.918556] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593799bb-81f3-491a-82f5-cee1da541518 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.923174] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a3606f3e-944d-43b6-a8ee-a055dda808ef tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "db74146d-abc3-4d48-be1b-6ad471794dbf" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.935140] env[62508]: DEBUG nova.compute.provider_tree [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1439.057023] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6dc31604-3590-4bc5-a5ee-337155a90760 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "db74146d-abc3-4d48-be1b-6ad471794dbf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.672s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1439.058345] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a3606f3e-944d-43b6-a8ee-a055dda808ef tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "db74146d-abc3-4d48-be1b-6ad471794dbf" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.135s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1439.058541] env[62508]: DEBUG nova.compute.manager [None req-a3606f3e-944d-43b6-a8ee-a055dda808ef tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 
db74146d-abc3-4d48-be1b-6ad471794dbf] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1439.059470] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e56132e7-3ebc-4ff1-b09d-77c786e7b40e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.067218] env[62508]: DEBUG nova.compute.manager [None req-a3606f3e-944d-43b6-a8ee-a055dda808ef tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62508) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1439.067893] env[62508]: DEBUG nova.objects.instance [None req-a3606f3e-944d-43b6-a8ee-a055dda808ef tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lazy-loading 'flavor' on Instance uuid db74146d-abc3-4d48-be1b-6ad471794dbf {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1439.081529] env[62508]: DEBUG oslo_concurrency.lockutils [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] Releasing lock "refresh_cache-2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1439.081833] env[62508]: DEBUG nova.compute.manager [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Received event network-changed-ec6adf65-fbdf-4276-8e19-eb416336bbff {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1439.082030] env[62508]: DEBUG nova.compute.manager [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Refreshing instance network info cache due to event network-changed-ec6adf65-fbdf-4276-8e19-eb416336bbff. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1439.082270] env[62508]: DEBUG oslo_concurrency.lockutils [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] Acquiring lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1439.082409] env[62508]: DEBUG oslo_concurrency.lockutils [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] Acquired lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.082572] env[62508]: DEBUG nova.network.neutron [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Refreshing network info cache for port ec6adf65-fbdf-4276-8e19-eb416336bbff {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1439.166018] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1439.166389] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9972a44e-81b4-43b4-9843-7f5443e878e4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.175578] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1439.175578] env[62508]: value = "task-1775670" [ 1439.175578] env[62508]: _type = "Task" [ 1439.175578] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.188841] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775670, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.242093] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1775669, 'name': Rename_Task, 'duration_secs': 0.149299} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.242438] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1439.242839] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6676953a-dfd3-4a16-9aa7-61e4a798409a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.249415] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1439.249415] env[62508]: value = "task-1775671" [ 1439.249415] env[62508]: _type = "Task" [ 1439.249415] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.258967] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1775671, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.314514] env[62508]: DEBUG oslo_concurrency.lockutils [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "refresh_cache-38d294a9-2f51-438d-b942-a88e380a981f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1439.314755] env[62508]: DEBUG oslo_concurrency.lockutils [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "refresh_cache-38d294a9-2f51-438d-b942-a88e380a981f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.314926] env[62508]: DEBUG nova.network.neutron [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1439.341097] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a55811-603c-0d0f-1bc1-71d828464189, 'name': SearchDatastore_Task, 'duration_secs': 0.011025} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.344857] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1439.344857] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1439.344857] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1439.344857] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.344857] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1439.344857] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae28d138-6528-4ab8-9652-f9a7ce36f0a1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.353843] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1439.354233] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1439.355902] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90c2d308-c468-423d-9178-10725d19286e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.362484] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Waiting for the task: (returnval){ [ 1439.362484] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529d4d36-4d4d-a2f7-2cdd-84040dc6ab75" [ 1439.362484] env[62508]: _type = "Task" [ 1439.362484] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.378186] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529d4d36-4d4d-a2f7-2cdd-84040dc6ab75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.439153] env[62508]: DEBUG nova.scheduler.client.report [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1439.505965] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquiring lock "e2d4c71b-1164-4c7d-9ffb-7f5489f92d32" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1439.505965] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lock "e2d4c71b-1164-4c7d-9ffb-7f5489f92d32" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1439.506192] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquiring lock "e2d4c71b-1164-4c7d-9ffb-7f5489f92d32-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1439.506265] 
env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lock "e2d4c71b-1164-4c7d-9ffb-7f5489f92d32-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1439.506436] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lock "e2d4c71b-1164-4c7d-9ffb-7f5489f92d32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1439.512347] env[62508]: INFO nova.compute.manager [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Terminating instance [ 1439.514513] env[62508]: DEBUG nova.compute.manager [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1439.516222] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1439.516222] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0c801d-0b68-449b-93d5-c3ba1f1ef8a9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.525884] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1439.525884] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-07192740-7d82-4e75-92e9-b88cce239bf7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.531118] env[62508]: DEBUG oslo_vmware.api [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for the task: (returnval){ [ 1439.531118] env[62508]: value = "task-1775672" [ 1439.531118] env[62508]: _type = "Task" [ 1439.531118] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.540231] env[62508]: DEBUG oslo_vmware.api [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775672, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.563055] env[62508]: DEBUG nova.compute.manager [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1439.575161] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3606f3e-944d-43b6-a8ee-a055dda808ef tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1439.575161] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fe531a73-e86b-486b-ba46-1faa1284b78a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.583893] env[62508]: DEBUG oslo_vmware.api [None req-a3606f3e-944d-43b6-a8ee-a055dda808ef tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1439.583893] env[62508]: value = "task-1775673" [ 1439.583893] env[62508]: _type = "Task" [ 1439.583893] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.599019] env[62508]: DEBUG oslo_vmware.api [None req-a3606f3e-944d-43b6-a8ee-a055dda808ef tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775673, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.692508] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775670, 'name': PowerOffVM_Task, 'duration_secs': 0.226919} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.692743] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1439.693664] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2755c61-50b0-4ea4-b7b4-8e8f80dd307b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.716023] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8ad8421-e16d-451d-b6f0-2a3417e60865 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.762129] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1775671, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.876069] env[62508]: DEBUG nova.network.neutron [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1439.887152] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529d4d36-4d4d-a2f7-2cdd-84040dc6ab75, 'name': SearchDatastore_Task, 'duration_secs': 0.016294} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.888051] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3487a65e-f0b0-4baf-bdd9-2c08d89ca9e6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.899654] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Waiting for the task: (returnval){ [ 1439.899654] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a6f385-f2ca-f89b-72c5-94757d0b6ded" [ 1439.899654] env[62508]: _type = "Task" [ 1439.899654] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.912190] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a6f385-f2ca-f89b-72c5-94757d0b6ded, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.948246] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.102s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1439.953650] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.633s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1439.959640] env[62508]: INFO nova.compute.claims [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1439.970368] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1439.970694] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b9cdf509-c748-4327-ac23-96bc5850551b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.984084] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1439.984084] env[62508]: value = "task-1775674" [ 1439.984084] env[62508]: _type = "Task" [ 1439.984084] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.993085] env[62508]: INFO nova.scheduler.client.report [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Deleted allocations for instance 18ee140a-97bd-439a-8027-0dd0a1f0a6e2 [ 1440.002537] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Acquiring lock "73452964-d690-451d-98c3-fba3c3301c6d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.002537] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Lock "73452964-d690-451d-98c3-fba3c3301c6d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.002537] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Acquiring lock "73452964-d690-451d-98c3-fba3c3301c6d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.002537] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Lock "73452964-d690-451d-98c3-fba3c3301c6d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.002537] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Lock "73452964-d690-451d-98c3-fba3c3301c6d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.004604] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] VM already powered off {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1440.004604] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1440.004604] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 
tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1440.005112] env[62508]: INFO nova.compute.manager [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Terminating instance [ 1440.010132] env[62508]: DEBUG nova.compute.manager [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1440.010355] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1440.011589] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9a942f-1ec7-416b-a5f8-11a99f56898e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.022857] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1440.024039] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cfe15771-282d-4176-b93a-2535d00771f0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.034195] env[62508]: DEBUG oslo_vmware.api [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Waiting for the task: (returnval){ [ 1440.034195] env[62508]: value = "task-1775675" [ 1440.034195] env[62508]: _type = "Task" [ 1440.034195] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.034195] env[62508]: DEBUG nova.network.neutron [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Updated VIF entry in instance network info cache for port ec6adf65-fbdf-4276-8e19-eb416336bbff. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1440.034195] env[62508]: DEBUG nova.network.neutron [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Updating instance_info_cache with network_info: [{"id": "ec6adf65-fbdf-4276-8e19-eb416336bbff", "address": "fa:16:3e:82:3c:01", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec6adf65-fb", "ovs_interfaceid": "ec6adf65-fbdf-4276-8e19-eb416336bbff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1440.050305] env[62508]: DEBUG oslo_vmware.api [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775672, 'name': PowerOffVM_Task, 'duration_secs': 0.214022} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.054624] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1440.054969] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1440.055215] env[62508]: DEBUG oslo_vmware.api [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': task-1775675, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.055474] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1e85d89-a9b8-4e8f-a595-3abe6b8de555 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.090265] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.098087] env[62508]: DEBUG oslo_vmware.api [None req-a3606f3e-944d-43b6-a8ee-a055dda808ef tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775673, 'name': PowerOffVM_Task, 'duration_secs': 0.286699} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.098285] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3606f3e-944d-43b6-a8ee-a055dda808ef tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1440.098578] env[62508]: DEBUG nova.compute.manager [None req-a3606f3e-944d-43b6-a8ee-a055dda808ef tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1440.099569] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba39b0d4-01a7-43d0-9089-d999d00cd6ac {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.143833] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1440.144308] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1440.144527] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Deleting the datastore file [datastore1] e2d4c71b-1164-4c7d-9ffb-7f5489f92d32 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1440.145588] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe96d75e-de42-4c42-af8f-1d161938531f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.153363] 
env[62508]: DEBUG oslo_vmware.api [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for the task: (returnval){ [ 1440.153363] env[62508]: value = "task-1775677" [ 1440.153363] env[62508]: _type = "Task" [ 1440.153363] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.164813] env[62508]: DEBUG oslo_vmware.api [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775677, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.197491] env[62508]: DEBUG nova.network.neutron [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Updating instance_info_cache with network_info: [{"id": "bf83eb47-d009-45ec-9583-6e3d46a6f0f8", "address": "fa:16:3e:d1:95:84", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf83eb47-d0", "ovs_interfaceid": "bf83eb47-d009-45ec-9583-6e3d46a6f0f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1440.265894] env[62508]: DEBUG oslo_vmware.api [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1775671, 'name': PowerOnVM_Task, 'duration_secs': 0.593979} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.266196] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1440.266369] env[62508]: INFO nova.compute.manager [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Took 9.42 seconds to spawn the instance on the hypervisor. 
[ 1440.266714] env[62508]: DEBUG nova.compute.manager [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1440.268237] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00ecf89-a0a6-4be1-8e66-d0f7e4e6b724 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.418718] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a6f385-f2ca-f89b-72c5-94757d0b6ded, 'name': SearchDatastore_Task, 'duration_secs': 0.020657} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.419086] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1440.419308] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5/2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1440.419655] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.420205] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1440.420459] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c347ddd-a202-489a-b18d-0285c5e3c414 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.423408] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b5bb66d4-8691-4ae5-a1dd-a2ddcc4df8b1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.432436] env[62508]: DEBUG oslo_vmware.api [None 
req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Waiting for the task: (returnval){ [ 1440.432436] env[62508]: value = "task-1775678" [ 1440.432436] env[62508]: _type = "Task" [ 1440.432436] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.439754] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1440.439950] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1440.443877] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46620b3f-8345-40ce-8745-68ec1c87383a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.446318] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': task-1775678, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.451264] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1440.451264] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e7bbc7-7ca2-6190-4e3c-59fa02866206" [ 1440.451264] env[62508]: _type = "Task" [ 1440.451264] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.462202] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e7bbc7-7ca2-6190-4e3c-59fa02866206, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.509114] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7f12ba90-183c-4e79-958b-9f3fc0a43cbb tempest-ServerDiagnosticsTest-498192083 tempest-ServerDiagnosticsTest-498192083-project-member] Lock "18ee140a-97bd-439a-8027-0dd0a1f0a6e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.306s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.540987] env[62508]: DEBUG oslo_concurrency.lockutils [req-5fc2b741-3681-4e64-99ee-aed35114233c req-75724f6e-4097-49f1-a80e-3f08ae8318b8 service nova] Releasing lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1440.555153] env[62508]: DEBUG oslo_vmware.api [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': task-1775675, 'name': PowerOffVM_Task, 'duration_secs': 0.323962} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.555374] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1440.555537] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1440.555809] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0801afc1-5c33-422c-9a93-b66ee6e97e5c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.615429] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a3606f3e-944d-43b6-a8ee-a055dda808ef tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "db74146d-abc3-4d48-be1b-6ad471794dbf" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.557s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.631071] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1440.631481] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1440.631481] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 
tempest-ServersTestJSON-507876772-project-member] Deleting the datastore file [datastore1] 73452964-d690-451d-98c3-fba3c3301c6d {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1440.632082] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d296574a-c235-4280-8b79-7e6639a3ed3e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.644320] env[62508]: DEBUG oslo_vmware.api [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Waiting for the task: (returnval){ [ 1440.644320] env[62508]: value = "task-1775681" [ 1440.644320] env[62508]: _type = "Task" [ 1440.644320] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.654074] env[62508]: DEBUG oslo_vmware.api [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': task-1775681, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.665425] env[62508]: DEBUG oslo_vmware.api [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Task: {'id': task-1775677, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.338548} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.666035] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1440.666035] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1440.666457] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1440.666457] env[62508]: INFO nova.compute.manager [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1440.666645] env[62508]: DEBUG oslo.service.loopingcall [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1440.666779] env[62508]: DEBUG nova.compute.manager [-] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1440.666877] env[62508]: DEBUG nova.network.neutron [-] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1440.704965] env[62508]: DEBUG oslo_concurrency.lockutils [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "refresh_cache-38d294a9-2f51-438d-b942-a88e380a981f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1440.705349] env[62508]: DEBUG nova.compute.manager [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Instance network_info: |[{"id": "bf83eb47-d009-45ec-9583-6e3d46a6f0f8", "address": "fa:16:3e:d1:95:84", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf83eb47-d0", "ovs_interfaceid": "bf83eb47-d009-45ec-9583-6e3d46a6f0f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1440.706188] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:95:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bf83eb47-d009-45ec-9583-6e3d46a6f0f8', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1440.715419] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Creating folder: Project (0156fba01363470eaa9771d5f296f730). Parent ref: group-v368536. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1440.715792] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d869abb-4e01-41b6-8a61-9eb9765f66e7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.729495] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Created folder: Project (0156fba01363470eaa9771d5f296f730) in parent group-v368536. [ 1440.729741] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Creating folder: Instances. Parent ref: group-v368649. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1440.730016] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c05bcc34-7b00-4b76-9dde-59a5f422db47 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.739808] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Created folder: Instances in parent group-v368649. [ 1440.741747] env[62508]: DEBUG oslo.service.loopingcall [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1440.741747] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1440.741747] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f29451ad-a4f1-4da4-8a32-0a60388b437c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.762416] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1440.762416] env[62508]: value = "task-1775684" [ 1440.762416] env[62508]: _type = "Task" [ 1440.762416] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.771755] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775684, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.793037] env[62508]: INFO nova.compute.manager [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Took 43.06 seconds to build instance. 
[ 1440.859018] env[62508]: DEBUG nova.compute.manager [req-f03b11b5-d2a9-4dda-b16b-e8c2d1291e3b req-9cba7177-51f3-412e-9a2a-fa61e072ea5d service nova] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Received event network-changed-bf83eb47-d009-45ec-9583-6e3d46a6f0f8 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1440.859018] env[62508]: DEBUG nova.compute.manager [req-f03b11b5-d2a9-4dda-b16b-e8c2d1291e3b req-9cba7177-51f3-412e-9a2a-fa61e072ea5d service nova] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Refreshing instance network info cache due to event network-changed-bf83eb47-d009-45ec-9583-6e3d46a6f0f8. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1440.859245] env[62508]: DEBUG oslo_concurrency.lockutils [req-f03b11b5-d2a9-4dda-b16b-e8c2d1291e3b req-9cba7177-51f3-412e-9a2a-fa61e072ea5d service nova] Acquiring lock "refresh_cache-38d294a9-2f51-438d-b942-a88e380a981f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1440.859613] env[62508]: DEBUG oslo_concurrency.lockutils [req-f03b11b5-d2a9-4dda-b16b-e8c2d1291e3b req-9cba7177-51f3-412e-9a2a-fa61e072ea5d service nova] Acquired lock "refresh_cache-38d294a9-2f51-438d-b942-a88e380a981f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.859806] env[62508]: DEBUG nova.network.neutron [req-f03b11b5-d2a9-4dda-b16b-e8c2d1291e3b req-9cba7177-51f3-412e-9a2a-fa61e072ea5d service nova] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Refreshing network info cache for port bf83eb47-d009-45ec-9583-6e3d46a6f0f8 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1440.944694] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': task-1775678, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.961657] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e7bbc7-7ca2-6190-4e3c-59fa02866206, 'name': SearchDatastore_Task, 'duration_secs': 0.039103} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.962905] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f71c246b-a3f8-4439-b8e0-96652a6d0417 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.974154] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1440.974154] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521eb497-1b13-92f8-8b21-3182f3868a16" [ 1440.974154] env[62508]: _type = "Task" [ 1440.974154] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.990740] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521eb497-1b13-92f8-8b21-3182f3868a16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.083913] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Acquiring lock "30e8b6ca-10fd-4e98-815d-1622f162b05c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1441.084581] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Lock "30e8b6ca-10fd-4e98-815d-1622f162b05c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1441.159644] env[62508]: DEBUG oslo_vmware.api [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': task-1775681, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.278925] env[62508]: DEBUG nova.compute.manager [req-ddb23c6a-43c0-4ea7-9d58-99d253caf3c4 req-ef2c37d6-c3bb-483e-9e21-7f295a4ae0ca service nova] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Received event network-vif-deleted-740df9be-fad5-483e-a52a-61e4af3c5da8 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1441.279121] env[62508]: INFO nova.compute.manager [req-ddb23c6a-43c0-4ea7-9d58-99d253caf3c4 req-ef2c37d6-c3bb-483e-9e21-7f295a4ae0ca service nova] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Neutron deleted interface 740df9be-fad5-483e-a52a-61e4af3c5da8; detaching it from the instance and deleting it from the info cache [ 1441.279312] env[62508]: DEBUG nova.network.neutron [req-ddb23c6a-43c0-4ea7-9d58-99d253caf3c4 req-ef2c37d6-c3bb-483e-9e21-7f295a4ae0ca service nova] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1441.280754] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775684, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.298313] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bae2a7e-2623-4421-ba2b-325d40b5c042 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "70c8de27-4696-4005-bbec-e7a33e56311b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.851s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.446653] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': task-1775678, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.768865} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.449426] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5/2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1441.449658] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1441.450173] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e9f26e29-ef28-47da-adb9-2ae0cb95534c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.458763] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Waiting for the task: (returnval){ [ 1441.458763] env[62508]: value = "task-1775685" [ 1441.458763] env[62508]: _type = "Task" [ 1441.458763] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.467882] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': task-1775685, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.476312] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3186a2-9637-4b75-ab1d-849b684f1559 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.488353] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521eb497-1b13-92f8-8b21-3182f3868a16, 'name': SearchDatastore_Task, 'duration_secs': 0.062882} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.491088] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1441.491088] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a10a4217-ae46-4f00-9ba1-cdf74f44ec7b/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk. {{(pid=62508) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1441.491361] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-200bc4ec-0e48-4306-bdbd-2d5c94ee9a8b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.496288] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d115055-160c-4dba-b150-022b6a0e6858 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.539577] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085bc2f8-ebdd-4e97-9d0b-a0441dddc651 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.542307] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1441.542307] env[62508]: value = "task-1775686" [ 1441.542307] env[62508]: _type = "Task" [ 1441.542307] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.551610] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d78b5a83-7957-4996-a024-a044e2e84927 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.558988] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775686, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.569554] env[62508]: DEBUG nova.compute.provider_tree [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1441.585472] env[62508]: DEBUG nova.network.neutron [-] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1441.653698] env[62508]: DEBUG oslo_vmware.api [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Task: {'id': task-1775681, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.606272} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.653945] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1441.654221] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1441.654883] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1441.654883] env[62508]: INFO nova.compute.manager [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1441.654883] env[62508]: DEBUG oslo.service.loopingcall [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1441.654996] env[62508]: DEBUG nova.compute.manager [-] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1441.655357] env[62508]: DEBUG nova.network.neutron [-] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1441.775184] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775684, 'name': CreateVM_Task, 'duration_secs': 0.547157} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.775442] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1441.776974] env[62508]: DEBUG oslo_concurrency.lockutils [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1441.777035] env[62508]: DEBUG oslo_concurrency.lockutils [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1441.777537] env[62508]: DEBUG oslo_concurrency.lockutils [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1441.779153] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e772622a-0dea-413b-a361-a3a501851f5f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.783109] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-07887ccb-2d0b-4d3b-816b-0565b25af6df {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.785559] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1441.785559] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e5b12c-110b-3301-cb03-293f4fefb94f" [ 1441.785559] env[62508]: _type = "Task" [ 1441.785559] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.796233] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2a5c77-8c3c-4618-bd45-6280aa27b7d7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.817441] env[62508]: DEBUG nova.compute.manager [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1441.820313] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e5b12c-110b-3301-cb03-293f4fefb94f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.851825] env[62508]: DEBUG nova.compute.manager [req-ddb23c6a-43c0-4ea7-9d58-99d253caf3c4 req-ef2c37d6-c3bb-483e-9e21-7f295a4ae0ca service nova] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Detach interface failed, port_id=740df9be-fad5-483e-a52a-61e4af3c5da8, reason: Instance e2d4c71b-1164-4c7d-9ffb-7f5489f92d32 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1441.967816] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': task-1775685, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099407} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.968075] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1441.969490] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e7e9f2-dd9d-4fc6-87bb-3ce70f35a87d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.994269] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5/2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1441.994621] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-952960f3-68ff-4da3-a2f8-3e5405673c0f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.015640] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Waiting for the task: (returnval){ [ 1442.015640] env[62508]: value = "task-1775687" [ 1442.015640] env[62508]: _type = "Task" [ 1442.015640] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.024086] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': task-1775687, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.031781] env[62508]: DEBUG nova.network.neutron [req-f03b11b5-d2a9-4dda-b16b-e8c2d1291e3b req-9cba7177-51f3-412e-9a2a-fa61e072ea5d service nova] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Updated VIF entry in instance network info cache for port bf83eb47-d009-45ec-9583-6e3d46a6f0f8. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1442.032173] env[62508]: DEBUG nova.network.neutron [req-f03b11b5-d2a9-4dda-b16b-e8c2d1291e3b req-9cba7177-51f3-412e-9a2a-fa61e072ea5d service nova] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Updating instance_info_cache with network_info: [{"id": "bf83eb47-d009-45ec-9583-6e3d46a6f0f8", "address": "fa:16:3e:d1:95:84", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf83eb47-d0", "ovs_interfaceid": "bf83eb47-d009-45ec-9583-6e3d46a6f0f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1442.037165] env[62508]: DEBUG oslo_vmware.rw_handles [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528cdf8b-20b8-2f71-a41d-4c5469891f05/disk-0.vmdk. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1442.041618] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb6c993-003d-45cd-85fa-d4373b8e4e89 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.052474] env[62508]: DEBUG oslo_vmware.rw_handles [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528cdf8b-20b8-2f71-a41d-4c5469891f05/disk-0.vmdk is in state: ready. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1442.052751] env[62508]: ERROR oslo_vmware.rw_handles [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528cdf8b-20b8-2f71-a41d-4c5469891f05/disk-0.vmdk due to incomplete transfer. 
[ 1442.056729] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f5ef6b0c-0aca-4537-a701-ee87b861c54c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.058704] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775686, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.064516] env[62508]: DEBUG oslo_vmware.rw_handles [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528cdf8b-20b8-2f71-a41d-4c5469891f05/disk-0.vmdk. {{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1442.064801] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Uploaded image 48904c02-82a2-4237-a426-b37b65d56dfa to the Glance image server {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1442.069468] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Destroying the VM {{(pid=62508) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1442.069468] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2cb0b019-82dd-4d98-8622-b79c0449d219 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.073956] env[62508]: DEBUG nova.scheduler.client.report [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1442.077459] env[62508]: DEBUG oslo_vmware.api [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for the task: (returnval){ [ 1442.077459] env[62508]: value = "task-1775688" [ 1442.077459] env[62508]: _type = "Task" [ 1442.077459] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.088057] env[62508]: INFO nova.compute.manager [-] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Took 1.42 seconds to deallocate network for instance. 
[ 1442.088470] env[62508]: DEBUG oslo_vmware.api [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775688, 'name': Destroy_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.301049] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e5b12c-110b-3301-cb03-293f4fefb94f, 'name': SearchDatastore_Task, 'duration_secs': 0.045584} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.301313] env[62508]: DEBUG oslo_concurrency.lockutils [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1442.301503] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1442.301740] env[62508]: DEBUG oslo_concurrency.lockutils [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1442.301911] env[62508]: DEBUG oslo_concurrency.lockutils [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.302123] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1442.302391] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3184e1f5-a8ee-4361-a32a-683d057555d5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.313667] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1442.313867] env[62508]: DEBUG 
nova.virt.vmwareapi.vmops [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1442.314688] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b518499b-0374-49d2-850f-084c0ad4ea2e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.320379] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1442.320379] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f7f2ee-b021-2ce7-3961-e5296c4c00c8" [ 1442.320379] env[62508]: _type = "Task" [ 1442.320379] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.337183] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f7f2ee-b021-2ce7-3961-e5296c4c00c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.355163] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1442.534245] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': task-1775687, 'name': ReconfigVM_Task, 'duration_secs': 0.41602} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.534629] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5/2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1442.536273] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dc805ceb-8c5d-4b3d-9ec3-1618c178f825 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.542547] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Waiting for the task: (returnval){ [ 1442.542547] env[62508]: value = "task-1775689" [ 1442.542547] env[62508]: _type = "Task" [ 1442.542547] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.548070] env[62508]: DEBUG oslo_concurrency.lockutils [req-f03b11b5-d2a9-4dda-b16b-e8c2d1291e3b req-9cba7177-51f3-412e-9a2a-fa61e072ea5d service nova] Releasing lock "refresh_cache-38d294a9-2f51-438d-b942-a88e380a981f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1442.557116] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': task-1775689, 'name': Rename_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.561155] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775686, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.579403] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.626s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1442.579997] env[62508]: DEBUG nova.compute.manager [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1442.582759] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.078s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.584333] env[62508]: INFO nova.compute.claims [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1442.601026] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1442.603259] env[62508]: DEBUG oslo_vmware.api [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775688, 'name': Destroy_Task, 'duration_secs': 0.398772} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.603520] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Destroyed the VM [ 1442.604082] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Deleting Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1442.604235] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b3484b12-a834-4058-bf85-d08bb673c84d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.614798] env[62508]: DEBUG oslo_vmware.api [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for the task: (returnval){ [ 1442.614798] env[62508]: value = "task-1775691" [ 1442.614798] env[62508]: _type = "Task" [ 1442.614798] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.626216] env[62508]: DEBUG oslo_vmware.api [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775691, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.748891] env[62508]: DEBUG oslo_concurrency.lockutils [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Acquiring lock "7d23d8f0-d7a9-4236-ad28-208e77b72138" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1442.750343] env[62508]: DEBUG oslo_concurrency.lockutils [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Lock "7d23d8f0-d7a9-4236-ad28-208e77b72138" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.831409] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f7f2ee-b021-2ce7-3961-e5296c4c00c8, 'name': SearchDatastore_Task, 'duration_secs': 0.018312} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.834019] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da1bedb8-cf34-4967-9bee-2b1c542dcf59 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.837900] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1442.837900] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f82ae1-457b-0c67-e348-4dffe5ee5119" [ 1442.837900] env[62508]: _type = "Task" [ 1442.837900] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.848625] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f82ae1-457b-0c67-e348-4dffe5ee5119, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.903351] env[62508]: DEBUG nova.compute.manager [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1442.904879] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8e5382-c295-4c7d-a5c7-18f226f3a08b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.044273] env[62508]: DEBUG nova.compute.manager [req-0470b2d2-be47-4e29-bc04-49f5796ddc63 req-1088bcad-90fc-4549-8f5e-1a9308897a49 service nova] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Received event network-vif-deleted-129ec0b1-e45e-4868-b60c-d9b307a0d56c {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1443.044273] env[62508]: INFO nova.compute.manager [req-0470b2d2-be47-4e29-bc04-49f5796ddc63 req-1088bcad-90fc-4549-8f5e-1a9308897a49 service nova] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Neutron deleted interface 129ec0b1-e45e-4868-b60c-d9b307a0d56c; detaching it from the instance and deleting it from the info cache [ 1443.044273] env[62508]: DEBUG nova.network.neutron [req-0470b2d2-be47-4e29-bc04-49f5796ddc63 req-1088bcad-90fc-4549-8f5e-1a9308897a49 service nova] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.064148] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775686, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.064762] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': task-1775689, 'name': Rename_Task, 'duration_secs': 0.204728} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.065810] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1443.067086] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f543d93-8ce0-4a8a-96af-41d512ab27de {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.074024] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Waiting for the task: (returnval){ [ 1443.074024] env[62508]: value = "task-1775692" [ 1443.074024] env[62508]: _type = "Task" [ 1443.074024] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.086321] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': task-1775692, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.098321] env[62508]: DEBUG nova.compute.utils [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1443.100661] env[62508]: DEBUG nova.compute.manager [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1443.102016] env[62508]: DEBUG nova.network.neutron [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1443.127039] env[62508]: DEBUG oslo_vmware.api [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775691, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.162297] env[62508]: DEBUG nova.policy [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a79a5afdc98410d820bebf40653a12d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd58abb4cdcb74100b7c81076c7642b6f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1443.189681] env[62508]: DEBUG nova.network.neutron [-] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.253216] env[62508]: DEBUG nova.compute.utils [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1443.350866] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f82ae1-457b-0c67-e348-4dffe5ee5119, 'name': SearchDatastore_Task, 'duration_secs': 0.088681} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.351900] env[62508]: DEBUG oslo_concurrency.lockutils [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1443.352275] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 38d294a9-2f51-438d-b942-a88e380a981f/38d294a9-2f51-438d-b942-a88e380a981f.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1443.352843] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-19a4f703-9c60-4c3f-92cc-c2fcf2f53c8d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.364772] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1443.364772] env[62508]: value = "task-1775693" [ 1443.364772] env[62508]: _type = "Task" [ 1443.364772] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.375232] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775693, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.421145] env[62508]: INFO nova.compute.manager [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] instance snapshotting [ 1443.423421] env[62508]: WARNING nova.compute.manager [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 1443.426765] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d782f317-b5bc-4f2b-9414-61b7beae76e0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.451177] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf404f1-c44a-4669-b7f8-174a0ef05598 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.547457] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6217bec9-92f4-4d71-8b6d-2b8fdf75070e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.560444] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29591fd5-2126-4d88-bc06-7742a31d5c0d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.578398] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775686, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.593959] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': task-1775692, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.611581] env[62508]: DEBUG nova.compute.manager [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1443.614564] env[62508]: DEBUG nova.compute.manager [req-0470b2d2-be47-4e29-bc04-49f5796ddc63 req-1088bcad-90fc-4549-8f5e-1a9308897a49 service nova] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Detach interface failed, port_id=129ec0b1-e45e-4868-b60c-d9b307a0d56c, reason: Instance 73452964-d690-451d-98c3-fba3c3301c6d could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1443.617508] env[62508]: DEBUG nova.network.neutron [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Successfully created port: 8fef6288-1754-4516-ae14-0dc489d4bb0f {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1443.632349] env[62508]: DEBUG oslo_vmware.api [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775691, 'name': RemoveSnapshot_Task, 'duration_secs': 1.003535} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.632349] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Deleted Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1443.632349] env[62508]: INFO nova.compute.manager [None req-31ee1010-b04d-466f-af42-bb58958c2a0d tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Took 15.65 seconds to snapshot the instance on the hypervisor. [ 1443.693841] env[62508]: INFO nova.compute.manager [-] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Took 2.04 seconds to deallocate network for instance. [ 1443.758896] env[62508]: DEBUG oslo_concurrency.lockutils [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Lock "7d23d8f0-d7a9-4236-ad28-208e77b72138" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.777949] env[62508]: DEBUG nova.compute.manager [req-db5f67d8-09b9-4fc5-a134-6a34f099a3c1 req-de42aa48-82cd-4188-a9bd-a251010b877c service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Received event network-changed-2623e6a9-42b1-4f98-9d68-a5230cdc3d79 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1443.778792] env[62508]: DEBUG nova.compute.manager [req-db5f67d8-09b9-4fc5-a134-6a34f099a3c1 req-de42aa48-82cd-4188-a9bd-a251010b877c service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Refreshing instance network info cache due to event network-changed-2623e6a9-42b1-4f98-9d68-a5230cdc3d79. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1443.778792] env[62508]: DEBUG oslo_concurrency.lockutils [req-db5f67d8-09b9-4fc5-a134-6a34f099a3c1 req-de42aa48-82cd-4188-a9bd-a251010b877c service nova] Acquiring lock "refresh_cache-70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1443.778792] env[62508]: DEBUG oslo_concurrency.lockutils [req-db5f67d8-09b9-4fc5-a134-6a34f099a3c1 req-de42aa48-82cd-4188-a9bd-a251010b877c service nova] Acquired lock "refresh_cache-70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1443.778792] env[62508]: DEBUG nova.network.neutron [req-db5f67d8-09b9-4fc5-a134-6a34f099a3c1 req-de42aa48-82cd-4188-a9bd-a251010b877c service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Refreshing network info cache for port 2623e6a9-42b1-4f98-9d68-a5230cdc3d79 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1443.878680] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775693, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.965436] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Creating Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1443.968495] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-550660dc-5770-4851-8c08-9ce3f944476b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.978493] env[62508]: DEBUG oslo_vmware.api [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1443.978493] env[62508]: value = "task-1775694" [ 1443.978493] env[62508]: _type = "Task" [ 1443.978493] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.990950] env[62508]: DEBUG oslo_vmware.api [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775694, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.068011] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775686, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.091681] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': task-1775692, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.106139] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4040a6f-66ba-4728-879f-62cfcd55e42a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.117585] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac921cad-d35e-4d3c-995d-1b71b4574c0b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.167019] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68594992-08c0-4201-916d-ce5f1b5d0cae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.177162] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f85c739-eaf9-48cd-89f7-9b2774f63721 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.194221] env[62508]: DEBUG nova.compute.provider_tree [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1444.206367] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1444.376287] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775693, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.493133] env[62508]: DEBUG oslo_vmware.api [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775694, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.567658] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775686, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.735255} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.567843] env[62508]: INFO nova.virt.vmwareapi.ds_util [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a10a4217-ae46-4f00-9ba1-cdf74f44ec7b/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk. [ 1444.568950] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-581d5a14-ae4e-4d9d-a105-f980ae635f19 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.599824] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] a10a4217-ae46-4f00-9ba1-cdf74f44ec7b/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1444.603708] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d76056a-2fe5-4f32-a6f7-90852f3d391f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.619783] env[62508]: DEBUG nova.network.neutron [req-db5f67d8-09b9-4fc5-a134-6a34f099a3c1 req-de42aa48-82cd-4188-a9bd-a251010b877c service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Updated VIF entry in instance network info cache for port 2623e6a9-42b1-4f98-9d68-a5230cdc3d79. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1444.620058] env[62508]: DEBUG nova.network.neutron [req-db5f67d8-09b9-4fc5-a134-6a34f099a3c1 req-de42aa48-82cd-4188-a9bd-a251010b877c service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Updating instance_info_cache with network_info: [{"id": "2623e6a9-42b1-4f98-9d68-a5230cdc3d79", "address": "fa:16:3e:9d:f1:1e", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2623e6a9-42", "ovs_interfaceid": "2623e6a9-42b1-4f98-9d68-a5230cdc3d79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1444.631022] env[62508]: DEBUG oslo_vmware.api [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': task-1775692, 'name': PowerOnVM_Task, 'duration_secs': 1.097727} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.631022] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1444.631022] env[62508]: INFO nova.compute.manager [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Took 11.10 seconds to spawn the instance on the hypervisor. [ 1444.631022] env[62508]: DEBUG nova.compute.manager [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1444.631377] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1444.631377] env[62508]: value = "task-1775695" [ 1444.631377] env[62508]: _type = "Task" [ 1444.631377] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.632879] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2668eef1-4918-45c2-8b46-4798d2b1c5a7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.660603] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775695, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.670592] env[62508]: DEBUG nova.compute.manager [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1444.699938] env[62508]: DEBUG nova.scheduler.client.report [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1444.706716] env[62508]: DEBUG nova.virt.hardware [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1444.707050] env[62508]: DEBUG nova.virt.hardware [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1444.707225] env[62508]: DEBUG nova.virt.hardware [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1444.707410] env[62508]: DEBUG nova.virt.hardware [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1444.707565] env[62508]: DEBUG nova.virt.hardware [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1444.707720] env[62508]: DEBUG nova.virt.hardware [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1444.707941] env[62508]: DEBUG nova.virt.hardware [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1444.708129] env[62508]: DEBUG nova.virt.hardware [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1444.708312] env[62508]: DEBUG nova.virt.hardware [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1444.708468] env[62508]: DEBUG nova.virt.hardware [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1444.708654] env[62508]: DEBUG nova.virt.hardware [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1444.710014] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264a3ae5-750c-439b-a719-3cbb7f35a2c4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.721504] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9644d8-3797-4f5a-94b5-7e240cbc3d6b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.869642] env[62508]: DEBUG oslo_concurrency.lockutils [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Acquiring lock 
"7d23d8f0-d7a9-4236-ad28-208e77b72138" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1444.869973] env[62508]: DEBUG oslo_concurrency.lockutils [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Lock "7d23d8f0-d7a9-4236-ad28-208e77b72138" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1444.870577] env[62508]: INFO nova.compute.manager [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Attaching volume baa10618-33f6-4bdd-877d-bdda20c03e84 to /dev/sdb [ 1444.882184] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775693, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.45862} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.884030] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 38d294a9-2f51-438d-b942-a88e380a981f/38d294a9-2f51-438d-b942-a88e380a981f.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1444.884715] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1444.888601] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1c994b00-0ba4-41dd-8b30-d87ad8d0dce2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.898389] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1444.898389] env[62508]: value = "task-1775697" [ 1444.898389] env[62508]: _type = "Task" [ 1444.898389] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.914918] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775697, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.922673] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d54985b-7608-4d5f-abfc-3bbb9134fa1b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.931575] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-822677fd-c6dd-4103-9283-249a31b7e028 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.956205] env[62508]: DEBUG nova.virt.block_device [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Updating existing volume attachment record: f977d809-04ac-4721-b077-5643a1ef4560 {{(pid=62508) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1444.999267] env[62508]: DEBUG oslo_vmware.api [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775694, 'name': CreateSnapshot_Task, 'duration_secs': 0.616191} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.999598] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Created Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1445.000514] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c72b4709-9058-43b3-b654-9b43ecb27b18 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.123891] env[62508]: DEBUG oslo_concurrency.lockutils [req-db5f67d8-09b9-4fc5-a134-6a34f099a3c1 req-de42aa48-82cd-4188-a9bd-a251010b877c service nova] Releasing lock "refresh_cache-70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1445.149428] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775695, 'name': ReconfigVM_Task, 'duration_secs': 0.400731} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.149732] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Reconfigured VM instance instance-00000024 to attach disk [datastore1] a10a4217-ae46-4f00-9ba1-cdf74f44ec7b/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1445.151807] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0449e9cb-6800-4a0a-91bc-ef5a37a17b93 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.196592] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0e2c2b4-6144-4804-8048-d35772cba473 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.207379] env[62508]: INFO nova.compute.manager [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Took 46.61 seconds to build instance. [ 1445.214736] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.632s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.215264] env[62508]: DEBUG nova.compute.manager [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1445.220554] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.667s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.220554] env[62508]: DEBUG nova.objects.instance [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lazy-loading 'resources' on Instance uuid ee99ff4d-9996-4cfa-b038-7b19aef27438 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1445.220554] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1445.220554] env[62508]: value = "task-1775700" [ 1445.220554] env[62508]: _type = "Task" [ 1445.220554] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.233027] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775700, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.391741] env[62508]: DEBUG nova.compute.manager [req-a4663452-69b1-44f6-abda-f99900f8e55a req-16d95221-e755-4faa-8c16-3db21f830d3c service nova] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Received event network-vif-plugged-8fef6288-1754-4516-ae14-0dc489d4bb0f {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1445.391741] env[62508]: DEBUG oslo_concurrency.lockutils [req-a4663452-69b1-44f6-abda-f99900f8e55a req-16d95221-e755-4faa-8c16-3db21f830d3c service nova] Acquiring lock "63fca45d-5922-4a14-9936-30070c349f8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1445.391741] env[62508]: DEBUG oslo_concurrency.lockutils [req-a4663452-69b1-44f6-abda-f99900f8e55a req-16d95221-e755-4faa-8c16-3db21f830d3c service nova] Lock "63fca45d-5922-4a14-9936-30070c349f8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.391741] env[62508]: DEBUG oslo_concurrency.lockutils [req-a4663452-69b1-44f6-abda-f99900f8e55a req-16d95221-e755-4faa-8c16-3db21f830d3c service nova] Lock "63fca45d-5922-4a14-9936-30070c349f8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.392222] env[62508]: DEBUG nova.compute.manager [req-a4663452-69b1-44f6-abda-f99900f8e55a req-16d95221-e755-4faa-8c16-3db21f830d3c service nova] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] No waiting events found dispatching network-vif-plugged-8fef6288-1754-4516-ae14-0dc489d4bb0f {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1445.392222] env[62508]: WARNING nova.compute.manager [req-a4663452-69b1-44f6-abda-f99900f8e55a req-16d95221-e755-4faa-8c16-3db21f830d3c service nova] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Received unexpected event network-vif-plugged-8fef6288-1754-4516-ae14-0dc489d4bb0f for instance with vm_state building and task_state spawning. [ 1445.413397] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775697, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123617} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.413703] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1445.414565] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2017c065-6bb5-4e17-8382-1a0be620948d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.447465] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] 38d294a9-2f51-438d-b942-a88e380a981f/38d294a9-2f51-438d-b942-a88e380a981f.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1445.448633] env[62508]: DEBUG nova.network.neutron [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Successfully updated port: 8fef6288-1754-4516-ae14-0dc489d4bb0f {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1445.449944] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa95177e-2d10-4abc-a903-95141a989a65 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.471488] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "refresh_cache-63fca45d-5922-4a14-9936-30070c349f8e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.471488] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquired lock "refresh_cache-63fca45d-5922-4a14-9936-30070c349f8e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.471488] env[62508]: DEBUG nova.network.neutron [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1445.484883] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1445.484883] env[62508]: value = "task-1775702" [ 1445.484883] env[62508]: _type = "Task" [ 1445.484883] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.496052] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775702, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.521380] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Creating linked-clone VM from snapshot {{(pid=62508) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1445.522147] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4d4bc5d9-010f-4216-9b37-b8a9cefd458f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.532203] env[62508]: DEBUG oslo_vmware.api [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1445.532203] env[62508]: value = "task-1775703" [ 1445.532203] env[62508]: _type = "Task" [ 1445.532203] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.541106] env[62508]: DEBUG oslo_vmware.api [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775703, 'name': CloneVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.709857] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54fffa2a-ea0f-4a86-b3ce-fc731dc73a38 tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Lock "2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.186s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.724434] env[62508]: DEBUG nova.compute.utils [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1445.726018] env[62508]: DEBUG nova.compute.manager [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1445.727851] env[62508]: DEBUG nova.network.neutron [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1445.742764] env[62508]: DEBUG nova.compute.manager [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1445.757220] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775700, 'name': ReconfigVM_Task, 'duration_secs': 0.173372} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.761415] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1445.762228] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0c03b1c-df6d-41dc-addd-5b3d97f8e1d5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.775788] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1445.775788] env[62508]: value = "task-1775704" [ 1445.775788] env[62508]: _type = "Task" [ 1445.775788] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.568857] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquiring lock "b911f25d-711b-411e-bb2d-2e59386ff2ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1446.569165] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lock "b911f25d-711b-411e-bb2d-2e59386ff2ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1446.569476] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquiring lock "b911f25d-711b-411e-bb2d-2e59386ff2ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1446.569476] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lock "b911f25d-711b-411e-bb2d-2e59386ff2ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1446.569783] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lock "b911f25d-711b-411e-bb2d-2e59386ff2ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1446.572241] env[62508]: DEBUG nova.compute.manager [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1446.577496] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775704, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.577750] env[62508]: WARNING oslo_vmware.common.loopingcall [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] task run outlasted interval by 0.301581 sec [ 1446.582206] env[62508]: DEBUG nova.policy [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '274b6e073c6e48b69d2734ca81a7c811', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1965e796bcbd44a1be5a9c1b50698c0d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1446.583314] env[62508]: INFO nova.compute.manager [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Terminating instance [ 1446.596641] env[62508]: DEBUG nova.compute.manager [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1446.596641] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1446.599655] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df3b05dc-362c-4a38-8bcf-e791d25fbe65 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.613684] env[62508]: DEBUG oslo_vmware.api [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775703, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.633152] env[62508]: DEBUG oslo_vmware.api [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775704, 'name': PowerOnVM_Task, 'duration_secs': 0.578465} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.634657] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775702, 'name': ReconfigVM_Task, 'duration_secs': 0.779238} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.634657] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1446.635587] env[62508]: DEBUG nova.network.neutron [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1446.637779] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1446.639659] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Reconfigured VM instance instance-00000028 to attach disk [datastore1] 38d294a9-2f51-438d-b942-a88e380a981f/38d294a9-2f51-438d-b942-a88e380a981f.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1446.640586] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9580a3fd-ad80-415a-a58b-d4a3cede2d11 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.644665] env[62508]: DEBUG nova.compute.manager [None req-cca4eb66-d666-4142-ba1e-532bf0d388a2 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1446.645049] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5f8f4949-e2a3-42c3-83a8-271886c2d72c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.649745] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7c3ae2-dad6-4661-a49d-4827c7138945 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.675899] env[62508]: DEBUG oslo_vmware.api [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1446.675899] env[62508]: value = "task-1775705" [ 1446.675899] env[62508]: _type = "Task" [ 1446.675899] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.676721] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1446.676721] env[62508]: value = "task-1775706" [ 1446.676721] env[62508]: _type = "Task" [ 1446.676721] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.700036] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775706, 'name': Rename_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.704141] env[62508]: DEBUG oslo_vmware.api [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775705, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.842801] env[62508]: DEBUG nova.network.neutron [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Updating instance_info_cache with network_info: [{"id": "8fef6288-1754-4516-ae14-0dc489d4bb0f", "address": "fa:16:3e:bd:a3:de", "network": {"id": "60ac9aa7-8f09-4ba8-b8bd-545df0c62d98", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-289553303-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d58abb4cdcb74100b7c81076c7642b6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fef6288-17", "ovs_interfaceid": "8fef6288-1754-4516-ae14-0dc489d4bb0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1446.949017] env[62508]: DEBUG oslo_concurrency.lockutils [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquiring lock "e07ab22e-bd07-4232-abfe-c0617c0b9813" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1446.949017] env[62508]: DEBUG oslo_concurrency.lockutils [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 
tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lock "e07ab22e-bd07-4232-abfe-c0617c0b9813" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.067797] env[62508]: DEBUG nova.network.neutron [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Successfully created port: b63a3fdb-4813-40cc-8ee8-6478ef3e9640 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1447.081644] env[62508]: DEBUG nova.compute.manager [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1447.097897] env[62508]: DEBUG oslo_vmware.api [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775703, 'name': CloneVM_Task, 'duration_secs': 1.495519} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.102946] env[62508]: INFO nova.virt.vmwareapi.vmops [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Created linked-clone VM from snapshot [ 1447.105037] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef89c7ca-07f5-406a-b26c-8bb3e2369b84 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.116443] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Uploading image d5779115-b6af-4def-baed-8e5bbe416621 {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1447.128573] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6693b47-c0da-428a-9941-069e594b9fed {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.138412] env[62508]: DEBUG nova.virt.hardware [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1447.139049] env[62508]: DEBUG nova.virt.hardware [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1447.139049] env[62508]: DEBUG nova.virt.hardware [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1447.139049] env[62508]: DEBUG nova.virt.hardware [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1447.139259] env[62508]: DEBUG nova.virt.hardware [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1447.139309] env[62508]: DEBUG nova.virt.hardware [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1447.139510] env[62508]: DEBUG nova.virt.hardware [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1447.139675] env[62508]: DEBUG nova.virt.hardware [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1447.139820] env[62508]: DEBUG nova.virt.hardware [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1447.141694] env[62508]: DEBUG nova.virt.hardware [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Possible 
topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1447.141694] env[62508]: DEBUG nova.virt.hardware [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1447.143201] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c535b507-62dc-426b-8416-079d470118c4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.150385] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.161151] env[62508]: DEBUG oslo_vmware.rw_handles [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1447.161151] env[62508]: value = "vm-368656" [ 1447.161151] env[62508]: _type = "VirtualMachine" [ 1447.161151] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1447.162842] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67b5b88-7a5e-47f5-9f3a-d45cf55a3e5e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.167438] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d24eeb57-d843-4d3c-9bb5-ebc79bb0d1e9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.172585] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-13aec14c-e0e2-4196-9918-b0be06153d54 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.223214] env[62508]: DEBUG oslo_vmware.rw_handles [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lease: (returnval){ [ 1447.223214] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520860d2-9ceb-cbe0-66fe-29cc809d5df6" [ 1447.223214] env[62508]: _type = "HttpNfcLease" [ 1447.223214] env[62508]: } obtained for exporting VM: (result){ [ 1447.223214] env[62508]: value = "vm-368656" [ 1447.223214] env[62508]: _type = "VirtualMachine" [ 1447.223214] env[62508]: }. 
{{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1447.223491] env[62508]: DEBUG oslo_vmware.api [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the lease: (returnval){ [ 1447.223491] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520860d2-9ceb-cbe0-66fe-29cc809d5df6" [ 1447.223491] env[62508]: _type = "HttpNfcLease" [ 1447.223491] env[62508]: } to be ready. {{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1447.229019] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5904f8ec-0c0d-48c3-89ad-ff5bc76010da {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.238787] env[62508]: DEBUG oslo_vmware.api [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775705, 'name': PowerOffVM_Task, 'duration_secs': 0.254709} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.239507] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775706, 'name': Rename_Task, 'duration_secs': 0.190547} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.244509] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1447.244509] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1447.244509] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1447.244509] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e1ea96a-a72d-4217-bca3-75c16bd2c503 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.244509] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8590800-9e10-4819-b1b5-037551cb0b7f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.248452] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1447.248452] env[62508]: value = 
"session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520860d2-9ceb-cbe0-66fe-29cc809d5df6" [ 1447.248452] env[62508]: _type = "HttpNfcLease" [ 1447.248452] env[62508]: } is ready. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1447.250760] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9079d7d-b561-4997-80f6-576987f72dc4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.255236] env[62508]: DEBUG oslo_vmware.rw_handles [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1447.255236] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520860d2-9ceb-cbe0-66fe-29cc809d5df6" [ 1447.255236] env[62508]: _type = "HttpNfcLease" [ 1447.255236] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1447.258681] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c27eb6d5-12a2-4ebb-8bcd-90fe00e12b99 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.261739] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1447.261739] env[62508]: value = "task-1775710" [ 1447.261739] env[62508]: _type = "Task" [ 1447.261739] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.271012] env[62508]: DEBUG oslo_vmware.rw_handles [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f28599-9b69-7aca-a0ed-f97ab833234b/disk-0.vmdk from lease info. {{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1447.271012] env[62508]: DEBUG oslo_vmware.rw_handles [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f28599-9b69-7aca-a0ed-f97ab833234b/disk-0.vmdk for reading. 
{{(pid=62508) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1447.279923] env[62508]: DEBUG nova.compute.provider_tree [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1447.346998] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775710, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.348958] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Releasing lock "refresh_cache-63fca45d-5922-4a14-9936-30070c349f8e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1447.349335] env[62508]: DEBUG nova.compute.manager [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Instance network_info: |[{"id": "8fef6288-1754-4516-ae14-0dc489d4bb0f", "address": "fa:16:3e:bd:a3:de", "network": {"id": "60ac9aa7-8f09-4ba8-b8bd-545df0c62d98", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-289553303-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d58abb4cdcb74100b7c81076c7642b6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fef6288-17", "ovs_interfaceid": "8fef6288-1754-4516-ae14-0dc489d4bb0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1447.350093] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:a3:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1b8b991-feba-44e6-900c-6486e7e122f0', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8fef6288-1754-4516-ae14-0dc489d4bb0f', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1447.358058] env[62508]: DEBUG oslo.service.loopingcall [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1447.359182] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1447.359461] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d48243b2-d93a-4777-a555-08462a4daa9d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.381111] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1447.381111] env[62508]: value = "task-1775712" [ 1447.381111] env[62508]: _type = "Task" [ 1447.381111] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.390799] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775712, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.423811] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3f9ac94b-3cf0-4f4b-951f-6f5f877540f6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.432730] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1447.433154] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1447.433376] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Deleting the datastore file [datastore1] b911f25d-711b-411e-bb2d-2e59386ff2ea {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1447.433963] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26bd3a1a-224f-444c-a51e-956cae47982b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.445041] env[62508]: DEBUG oslo_vmware.api [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 
tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1447.445041] env[62508]: value = "task-1775713" [ 1447.445041] env[62508]: _type = "Task" [ 1447.445041] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.456062] env[62508]: DEBUG oslo_vmware.api [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775713, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.592768] env[62508]: DEBUG nova.compute.manager [req-49648f56-4fc1-4827-9485-69a4b920db0f req-9457a5c2-306f-4873-9c64-b311e808d589 service nova] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Received event network-changed-8fef6288-1754-4516-ae14-0dc489d4bb0f {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1447.593106] env[62508]: DEBUG nova.compute.manager [req-49648f56-4fc1-4827-9485-69a4b920db0f req-9457a5c2-306f-4873-9c64-b311e808d589 service nova] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Refreshing instance network info cache due to event network-changed-8fef6288-1754-4516-ae14-0dc489d4bb0f. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1447.593611] env[62508]: DEBUG oslo_concurrency.lockutils [req-49648f56-4fc1-4827-9485-69a4b920db0f req-9457a5c2-306f-4873-9c64-b311e808d589 service nova] Acquiring lock "refresh_cache-63fca45d-5922-4a14-9936-30070c349f8e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1447.593662] env[62508]: DEBUG oslo_concurrency.lockutils [req-49648f56-4fc1-4827-9485-69a4b920db0f req-9457a5c2-306f-4873-9c64-b311e808d589 service nova] Acquired lock "refresh_cache-63fca45d-5922-4a14-9936-30070c349f8e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1447.593815] env[62508]: DEBUG nova.network.neutron [req-49648f56-4fc1-4827-9485-69a4b920db0f req-9457a5c2-306f-4873-9c64-b311e808d589 service nova] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Refreshing network info cache for port 8fef6288-1754-4516-ae14-0dc489d4bb0f {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1447.605806] env[62508]: DEBUG nova.compute.manager [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1447.606942] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4474d0f-3027-4797-a978-bcafbf8e71f3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.623736] env[62508]: DEBUG oslo_concurrency.lockutils [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Acquiring lock "6afa4e73-64b4-4b10-b598-433f0c22ecb3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.623973] env[62508]: DEBUG oslo_concurrency.lockutils [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Lock "6afa4e73-64b4-4b10-b598-433f0c22ecb3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.777943] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775710, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.807861] env[62508]: ERROR nova.scheduler.client.report [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] [req-0ce3539b-05bc-454b-948d-c3c77ee7b275] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0ce3539b-05bc-454b-948d-c3c77ee7b275"}]} [ 1447.831432] env[62508]: DEBUG nova.scheduler.client.report [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1447.861518] env[62508]: DEBUG nova.scheduler.client.report [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1447.861518] env[62508]: DEBUG nova.compute.provider_tree [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1447.883246] env[62508]: DEBUG nova.scheduler.client.report [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1447.905890] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775712, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.915021] env[62508]: DEBUG nova.scheduler.client.report [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1447.960678] env[62508]: DEBUG oslo_vmware.api [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775713, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.273062} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.960981] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1447.961731] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1447.961731] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1447.961731] env[62508]: INFO nova.compute.manager [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Took 1.37 seconds to destroy the instance on the hypervisor. 
[ 1447.962029] env[62508]: DEBUG oslo.service.loopingcall [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1447.962225] env[62508]: DEBUG nova.compute.manager [-] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1447.962435] env[62508]: DEBUG nova.network.neutron [-] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1447.987713] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2d17d681-4d68-4c8b-9b39-b44fd84f90ff tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "95a289ac-3178-45ea-80d2-905b9af54f3c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.991022] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2d17d681-4d68-4c8b-9b39-b44fd84f90ff tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "95a289ac-3178-45ea-80d2-905b9af54f3c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.991022] env[62508]: DEBUG nova.compute.manager [None req-2d17d681-4d68-4c8b-9b39-b44fd84f90ff tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1447.991022] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e432756-72cc-41bb-9038-e29c1ae042f1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.998898] env[62508]: DEBUG nova.compute.manager [None req-2d17d681-4d68-4c8b-9b39-b44fd84f90ff tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62508) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1447.999523] env[62508]: DEBUG nova.objects.instance [None req-2d17d681-4d68-4c8b-9b39-b44fd84f90ff tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lazy-loading 'flavor' on Instance uuid 95a289ac-3178-45ea-80d2-905b9af54f3c {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1448.126089] env[62508]: INFO nova.compute.manager [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] instance snapshotting [ 1448.130213] env[62508]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39dadf3f-ef03-416b-b08d-13011614598b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.170141] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b77e74-6e1e-4a66-9a92-6ac57588f6c3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.281451] env[62508]: DEBUG oslo_vmware.api [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775710, 'name': PowerOnVM_Task, 'duration_secs': 0.663209} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.282319] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1448.282689] env[62508]: INFO nova.compute.manager [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Took 10.60 seconds to spawn the instance on the hypervisor. [ 1448.282901] env[62508]: DEBUG nova.compute.manager [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1448.283856] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c69865-e049-4d5d-93e9-9888c1ad1e4f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.400065] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775712, 'name': CreateVM_Task, 'duration_secs': 0.592466} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.400364] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1448.401254] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.401463] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.401878] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1448.402223] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2c47c4d-2ee1-4cf4-b2b5-f1f1e6945e3b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.412154] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1448.412154] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5264f7e4-37fe-13a4-174b-5d0341a7edba" [ 1448.412154] env[62508]: _type = "Task" [ 1448.412154] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.421773] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5264f7e4-37fe-13a4-174b-5d0341a7edba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.427742] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886a0026-a78e-4812-866a-ea024d492e5f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.437056] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc42f489-cb4f-42ea-8673-9f9e74e35768 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.473722] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0486e9c3-511c-417c-bd9b-dcc35fa35c64 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.482832] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ea9548-a51d-4bae-ab78-28de4e25a0d8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.501871] env[62508]: DEBUG nova.compute.provider_tree [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1448.509062] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d17d681-4d68-4c8b-9b39-b44fd84f90ff tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1448.509203] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad13b1a9-92be-4e3a-be82-ae4e658677cb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.518988] env[62508]: DEBUG oslo_vmware.api [None req-2d17d681-4d68-4c8b-9b39-b44fd84f90ff tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1448.518988] env[62508]: value = "task-1775714" [ 1448.518988] env[62508]: _type = "Task" [ 1448.518988] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.530623] env[62508]: DEBUG oslo_vmware.api [None req-2d17d681-4d68-4c8b-9b39-b44fd84f90ff tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1775714, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.690725] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Creating Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1448.691321] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d56a8e65-97bb-47c6-b1ec-d5b283c6d6a3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.703819] env[62508]: DEBUG oslo_vmware.api [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for the task: (returnval){ [ 1448.703819] env[62508]: value = "task-1775715" [ 1448.703819] env[62508]: _type = "Task" [ 1448.703819] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.718896] env[62508]: DEBUG oslo_vmware.api [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775715, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.800011] env[62508]: DEBUG nova.network.neutron [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Successfully updated port: b63a3fdb-4813-40cc-8ee8-6478ef3e9640 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1448.810672] env[62508]: INFO nova.compute.manager [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Took 45.55 seconds to build instance. [ 1448.821049] env[62508]: DEBUG nova.network.neutron [req-49648f56-4fc1-4827-9485-69a4b920db0f req-9457a5c2-306f-4873-9c64-b311e808d589 service nova] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Updated VIF entry in instance network info cache for port 8fef6288-1754-4516-ae14-0dc489d4bb0f. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1448.821588] env[62508]: DEBUG nova.network.neutron [req-49648f56-4fc1-4827-9485-69a4b920db0f req-9457a5c2-306f-4873-9c64-b311e808d589 service nova] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Updating instance_info_cache with network_info: [{"id": "8fef6288-1754-4516-ae14-0dc489d4bb0f", "address": "fa:16:3e:bd:a3:de", "network": {"id": "60ac9aa7-8f09-4ba8-b8bd-545df0c62d98", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-289553303-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d58abb4cdcb74100b7c81076c7642b6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fef6288-17", "ovs_interfaceid": "8fef6288-1754-4516-ae14-0dc489d4bb0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.925525] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5264f7e4-37fe-13a4-174b-5d0341a7edba, 'name': SearchDatastore_Task, 'duration_secs': 0.022035} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.926012] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1448.926323] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1448.926642] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.926877] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.927197] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1448.927713] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b9836aea-ce14-4a12-9e6d-47ad45fef03d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.939724] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1448.939991] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1448.940811] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd15e707-ea17-43ca-bdd7-faa2729d9762 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.948311] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1448.948311] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52782445-fd91-deb6-e009-e00e5df8c0bd" [ 1448.948311] env[62508]: _type = "Task" [ 1448.948311] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.960363] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52782445-fd91-deb6-e009-e00e5df8c0bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.009619] env[62508]: DEBUG nova.scheduler.client.report [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1449.030710] env[62508]: DEBUG oslo_vmware.api [None req-2d17d681-4d68-4c8b-9b39-b44fd84f90ff tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1775714, 'name': PowerOffVM_Task, 'duration_secs': 0.345836} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.034020] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d17d681-4d68-4c8b-9b39-b44fd84f90ff tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1449.034020] env[62508]: DEBUG nova.compute.manager [None req-2d17d681-4d68-4c8b-9b39-b44fd84f90ff tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1449.034020] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-110fd3f9-9561-4379-b55e-402d4977edc4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.150948] env[62508]: DEBUG nova.compute.manager [req-d0dc21a8-1401-48dd-933c-bea2caca34ab req-7600a676-3747-436a-8ceb-127f93f9da7f service nova] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Received event network-vif-deleted-b7ef30a6-939d-4546-9597-db2b4cc755ae {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1449.150948] env[62508]: INFO nova.compute.manager [req-d0dc21a8-1401-48dd-933c-bea2caca34ab req-7600a676-3747-436a-8ceb-127f93f9da7f service nova] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Neutron deleted interface b7ef30a6-939d-4546-9597-db2b4cc755ae; detaching it from the instance and deleting it from the info cache [ 1449.150948] env[62508]: DEBUG nova.network.neutron [req-d0dc21a8-1401-48dd-933c-bea2caca34ab req-7600a676-3747-436a-8ceb-127f93f9da7f service nova] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1449.214910] env[62508]: DEBUG oslo_vmware.api [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775715, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.269379] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Acquiring lock "2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.270578] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Lock "2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.270905] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Acquiring lock "2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.271228] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Lock "2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.271407] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Lock "2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1449.273781] env[62508]: INFO nova.compute.manager [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Terminating instance [ 1449.279375] env[62508]: DEBUG nova.compute.manager [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1449.279482] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1449.280314] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae24646-a382-43c9-bfab-979f0c6be527 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.289232] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1449.289563] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5ad58df2-8a13-431a-9334-6537190c2b3b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.298156] env[62508]: DEBUG oslo_vmware.api [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Waiting for the task: (returnval){ [ 1449.298156] env[62508]: value = "task-1775717" [ 1449.298156] env[62508]: _type = "Task" [ 1449.298156] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.298834] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "refresh_cache-9a3ef326-0fbf-4fd2-bb5e-3009bf661381" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1449.299069] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquired lock "refresh_cache-9a3ef326-0fbf-4fd2-bb5e-3009bf661381" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1449.299281] env[62508]: DEBUG nova.network.neutron [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1449.313681] env[62508]: DEBUG oslo_vmware.api [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': task-1775717, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.314130] env[62508]: DEBUG oslo_concurrency.lockutils [None req-51eb029f-1d05-4759-96a5-4c142ed04b41 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "38d294a9-2f51-438d-b942-a88e380a981f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.335s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1449.326162] env[62508]: DEBUG oslo_concurrency.lockutils [req-49648f56-4fc1-4827-9485-69a4b920db0f req-9457a5c2-306f-4873-9c64-b311e808d589 service nova] Releasing lock "refresh_cache-63fca45d-5922-4a14-9936-30070c349f8e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1449.461813] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52782445-fd91-deb6-e009-e00e5df8c0bd, 'name': SearchDatastore_Task, 'duration_secs': 0.012046} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.462878] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4df4fc5a-1135-42cf-a0e8-e6290262d919 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.470554] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1449.470554] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5280ceb5-97a7-8d37-ea57-2a641c0c3216" [ 1449.470554] env[62508]: _type = "Task" [ 1449.470554] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.482604] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5280ceb5-97a7-8d37-ea57-2a641c0c3216, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.518637] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.299s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1449.520751] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 24.208s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.521260] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1449.521326] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1449.521787] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.520s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.523550] env[62508]: INFO nova.compute.claims [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1449.527293] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f395e23-1826-4b2e-ae7d-2f670fb77995 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.537813] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f2f878-53cc-4aa5-b8da-0cedaa711e01 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.543443] env[62508]: INFO nova.scheduler.client.report [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Deleted allocations for instance ee99ff4d-9996-4cfa-b038-7b19aef27438 [ 1449.561041] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2d17d681-4d68-4c8b-9b39-b44fd84f90ff tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "95a289ac-3178-45ea-80d2-905b9af54f3c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.573s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1449.563128] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Volume attach. Driver type: vmdk {{(pid=62508) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1449.564130] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368655', 'volume_id': 'baa10618-33f6-4bdd-877d-bdda20c03e84', 'name': 'volume-baa10618-33f6-4bdd-877d-bdda20c03e84', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '7d23d8f0-d7a9-4236-ad28-208e77b72138', 'attached_at': '', 'detached_at': '', 'volume_id': 'baa10618-33f6-4bdd-877d-bdda20c03e84', 'serial': 'baa10618-33f6-4bdd-877d-bdda20c03e84'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1449.564325] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5a24d9-13b8-4ac5-bd38-80054052ff30 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.567978] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d0a25fc-cbb9-4b3f-aa44-5f92403685ca {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.592878] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d42a342a-c531-4fe6-b69f-a41b4a7c3345 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.597151] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c564a7-f27e-437b-99be-931003a26679 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.630518] env[62508]: DEBUG nova.network.neutron [-] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1449.635029] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179555MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1449.635029] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.654625] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 
tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] volume-baa10618-33f6-4bdd-877d-bdda20c03e84/volume-baa10618-33f6-4bdd-877d-bdda20c03e84.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1449.655618] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45ae91de-277d-4ca4-9df9-a3b16a5c04bd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.669921] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b950fabc-d5ad-426e-ae87-134fde15ad56 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.673401] env[62508]: DEBUG nova.compute.manager [req-1e7d5404-e0ce-499c-9f2d-cbd49a22f888 req-f494b20d-2639-4345-8bc5-719fae50c372 service nova] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Received event network-vif-plugged-b63a3fdb-4813-40cc-8ee8-6478ef3e9640 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1449.673708] env[62508]: DEBUG oslo_concurrency.lockutils [req-1e7d5404-e0ce-499c-9f2d-cbd49a22f888 req-f494b20d-2639-4345-8bc5-719fae50c372 service nova] Acquiring lock "9a3ef326-0fbf-4fd2-bb5e-3009bf661381-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.674044] env[62508]: DEBUG oslo_concurrency.lockutils [req-1e7d5404-e0ce-499c-9f2d-cbd49a22f888 req-f494b20d-2639-4345-8bc5-719fae50c372 service nova] Lock "9a3ef326-0fbf-4fd2-bb5e-3009bf661381-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.674301] env[62508]: DEBUG oslo_concurrency.lockutils [req-1e7d5404-e0ce-499c-9f2d-cbd49a22f888 req-f494b20d-2639-4345-8bc5-719fae50c372 service nova] Lock "9a3ef326-0fbf-4fd2-bb5e-3009bf661381-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1449.674547] env[62508]: DEBUG nova.compute.manager [req-1e7d5404-e0ce-499c-9f2d-cbd49a22f888 req-f494b20d-2639-4345-8bc5-719fae50c372 service nova] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] No waiting events found dispatching network-vif-plugged-b63a3fdb-4813-40cc-8ee8-6478ef3e9640 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1449.674920] env[62508]: WARNING nova.compute.manager [req-1e7d5404-e0ce-499c-9f2d-cbd49a22f888 req-f494b20d-2639-4345-8bc5-719fae50c372 service nova] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Received unexpected event network-vif-plugged-b63a3fdb-4813-40cc-8ee8-6478ef3e9640 for instance with vm_state building and task_state spawning. 
[ 1449.675103] env[62508]: DEBUG nova.compute.manager [req-1e7d5404-e0ce-499c-9f2d-cbd49a22f888 req-f494b20d-2639-4345-8bc5-719fae50c372 service nova] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Received event network-changed-b63a3fdb-4813-40cc-8ee8-6478ef3e9640 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1449.675349] env[62508]: DEBUG nova.compute.manager [req-1e7d5404-e0ce-499c-9f2d-cbd49a22f888 req-f494b20d-2639-4345-8bc5-719fae50c372 service nova] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Refreshing instance network info cache due to event network-changed-b63a3fdb-4813-40cc-8ee8-6478ef3e9640. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1449.675596] env[62508]: DEBUG oslo_concurrency.lockutils [req-1e7d5404-e0ce-499c-9f2d-cbd49a22f888 req-f494b20d-2639-4345-8bc5-719fae50c372 service nova] Acquiring lock "refresh_cache-9a3ef326-0fbf-4fd2-bb5e-3009bf661381" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1449.680528] env[62508]: INFO nova.compute.manager [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Rescuing [ 1449.680858] env[62508]: DEBUG oslo_concurrency.lockutils [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "refresh_cache-7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1449.681258] env[62508]: DEBUG oslo_concurrency.lockutils [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquired lock "refresh_cache-7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1449.681467] env[62508]: DEBUG nova.network.neutron [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1449.685929] env[62508]: DEBUG oslo_vmware.api [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Waiting for the task: (returnval){ [ 1449.685929] env[62508]: value = "task-1775718" [ 1449.685929] env[62508]: _type = "Task" [ 1449.685929] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.694493] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd768b4-f0db-439f-a621-c043e5aaa26f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.735083] env[62508]: DEBUG oslo_vmware.api [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Task: {'id': task-1775718, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.746398] env[62508]: DEBUG oslo_vmware.api [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775715, 'name': CreateSnapshot_Task, 'duration_secs': 1.017323} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.763355] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Created Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1449.764875] env[62508]: DEBUG nova.compute.manager [req-d0dc21a8-1401-48dd-933c-bea2caca34ab req-7600a676-3747-436a-8ceb-127f93f9da7f service nova] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Detach interface failed, port_id=b7ef30a6-939d-4546-9597-db2b4cc755ae, reason: Instance b911f25d-711b-411e-bb2d-2e59386ff2ea could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1449.765076] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9bb943-6432-4fe9-bfc1-27ce23420f0e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.811069] env[62508]: DEBUG oslo_vmware.api [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': task-1775717, 'name': PowerOffVM_Task, 'duration_secs': 0.282734} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.811687] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1449.811894] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1449.812798] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6dad28c-3bc8-4d22-8636-993384da55ae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.816315] env[62508]: DEBUG nova.compute.manager [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1449.857331] env[62508]: DEBUG nova.network.neutron [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1449.980072] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1449.980453] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1449.980787] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Deleting the datastore file [datastore1] 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1449.981780] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad25ee37-a86a-4451-b1c2-1db3a2da0209 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.989188] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5280ceb5-97a7-8d37-ea57-2a641c0c3216, 'name': SearchDatastore_Task, 'duration_secs': 0.013734} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.990060] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1449.990596] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 63fca45d-5922-4a14-9936-30070c349f8e/63fca45d-5922-4a14-9936-30070c349f8e.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1449.990832] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc2cab20-8f57-43f9-a75b-e25c419465f8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.995811] env[62508]: DEBUG oslo_vmware.api [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Waiting for the task: (returnval){ [ 1449.995811] env[62508]: value = "task-1775720" [ 1449.995811] env[62508]: _type = "Task" [ 1449.995811] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.003168] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1450.003168] env[62508]: value = "task-1775721" [ 1450.003168] env[62508]: _type = "Task" [ 1450.003168] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.015900] env[62508]: DEBUG oslo_vmware.api [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': task-1775720, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.023200] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775721, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.080300] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ad2e6646-9999-4e6e-83f6-bcb0690cc0cd tempest-FloatingIPsAssociationTestJSON-1336337907 tempest-FloatingIPsAssociationTestJSON-1336337907-project-member] Lock "ee99ff4d-9996-4cfa-b038-7b19aef27438" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.989s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.085306] env[62508]: DEBUG nova.network.neutron [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Updating instance_info_cache with network_info: [{"id": "b63a3fdb-4813-40cc-8ee8-6478ef3e9640", "address": "fa:16:3e:2e:81:ac", "network": {"id": "73c15752-ef12-4a06-b340-8848b11c86c2", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-836710224-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1965e796bcbd44a1be5a9c1b50698c0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb63a3fdb-48", "ovs_interfaceid": "b63a3fdb-4813-40cc-8ee8-6478ef3e9640", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.146686] env[62508]: INFO nova.compute.manager [-] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Took 2.18 seconds to deallocate network for instance. [ 1450.207279] env[62508]: DEBUG oslo_vmware.api [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Task: {'id': task-1775718, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.263690] env[62508]: INFO nova.compute.manager [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Rebuilding instance [ 1450.292418] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Creating linked-clone VM from snapshot {{(pid=62508) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1450.295597] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0183d1a1-1fc9-4136-b537-dc2fcb43a908 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.315113] env[62508]: DEBUG oslo_vmware.api [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for the task: (returnval){ [ 1450.315113] env[62508]: value = "task-1775722" [ 1450.315113] env[62508]: _type = "Task" [ 1450.315113] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.330769] env[62508]: DEBUG oslo_vmware.api [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775722, 'name': CloneVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.350719] env[62508]: DEBUG nova.compute.manager [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1450.352428] env[62508]: DEBUG oslo_concurrency.lockutils [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.354075] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e10d1bad-940a-42a2-af83-d8f60a05699d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.509569] env[62508]: DEBUG oslo_vmware.api [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Task: {'id': task-1775720, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181589} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.513745] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1450.514021] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1450.514359] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1450.514565] env[62508]: INFO nova.compute.manager [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1450.514844] env[62508]: DEBUG oslo.service.loopingcall [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1450.515099] env[62508]: DEBUG nova.compute.manager [-] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1450.515184] env[62508]: DEBUG nova.network.neutron [-] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1450.523707] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775721, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.588700] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Releasing lock "refresh_cache-9a3ef326-0fbf-4fd2-bb5e-3009bf661381" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1450.589060] env[62508]: DEBUG nova.compute.manager [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Instance network_info: |[{"id": "b63a3fdb-4813-40cc-8ee8-6478ef3e9640", "address": "fa:16:3e:2e:81:ac", "network": {"id": "73c15752-ef12-4a06-b340-8848b11c86c2", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-836710224-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1965e796bcbd44a1be5a9c1b50698c0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb63a3fdb-48", "ovs_interfaceid": "b63a3fdb-4813-40cc-8ee8-6478ef3e9640", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1450.589489] env[62508]: DEBUG oslo_concurrency.lockutils [req-1e7d5404-e0ce-499c-9f2d-cbd49a22f888 req-f494b20d-2639-4345-8bc5-719fae50c372 service nova] Acquired lock "refresh_cache-9a3ef326-0fbf-4fd2-bb5e-3009bf661381" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.589685] env[62508]: DEBUG nova.network.neutron [req-1e7d5404-e0ce-499c-9f2d-cbd49a22f888 req-f494b20d-2639-4345-8bc5-719fae50c372 service nova] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Refreshing network info cache for port b63a3fdb-4813-40cc-8ee8-6478ef3e9640 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1450.591476] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:81:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b63a3fdb-4813-40cc-8ee8-6478ef3e9640', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1450.599338] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 
tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Creating folder: Project (1965e796bcbd44a1be5a9c1b50698c0d). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1450.603659] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38b11f38-1043-43d9-b270-ac9b7def3e8c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.617589] env[62508]: DEBUG nova.network.neutron [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Updating instance_info_cache with network_info: [{"id": "985905ec-2a79-4b7a-b4ad-d3bf00a42f43", "address": "fa:16:3e:fa:b1:10", "network": {"id": "1469693f-972e-4bc4-8302-f159fa7e79b8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1766475744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27e6f55b56be40d2a619f0119aefb2ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap985905ec-2a", "ovs_interfaceid": "985905ec-2a79-4b7a-b4ad-d3bf00a42f43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.622220] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Created folder: Project (1965e796bcbd44a1be5a9c1b50698c0d) in parent group-v368536. [ 1450.622220] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Creating folder: Instances. Parent ref: group-v368661. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1450.622220] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f2443a4-df20-4f83-b23d-12c68a7494be {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.639322] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Created folder: Instances in parent group-v368661. 
[ 1450.639322] env[62508]: DEBUG oslo.service.loopingcall [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1450.639589] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1450.639979] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c342af3a-6152-4f5d-b0b7-95419f5b01c9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.659703] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.668020] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1450.668020] env[62508]: value = "task-1775725" [ 1450.668020] env[62508]: _type = "Task" [ 1450.668020] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.679515] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775725, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.710997] env[62508]: DEBUG oslo_vmware.api [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Task: {'id': task-1775718, 'name': ReconfigVM_Task, 'duration_secs': 0.674064} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.714303] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Reconfigured VM instance instance-00000010 to attach disk [datastore1] volume-baa10618-33f6-4bdd-877d-bdda20c03e84/volume-baa10618-33f6-4bdd-877d-bdda20c03e84.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1450.721336] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad01790b-e735-4655-b2fa-93f70a2642ca {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.742621] env[62508]: DEBUG oslo_vmware.api [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Waiting for the task: (returnval){ [ 1450.742621] env[62508]: value = "task-1775726" [ 1450.742621] env[62508]: _type = "Task" [ 1450.742621] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.754766] env[62508]: DEBUG oslo_vmware.api [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Task: {'id': task-1775726, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.760851] env[62508]: DEBUG nova.objects.instance [None req-b4e57e3d-cb1a-4574-9641-67a344d9f7de tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lazy-loading 'flavor' on Instance uuid 95a289ac-3178-45ea-80d2-905b9af54f3c {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1450.828777] env[62508]: DEBUG oslo_vmware.api [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775722, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.869029] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1450.869158] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e5c1e2a-9e73-46a8-8b27-096182d7ffe7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.880896] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1450.880896] env[62508]: value = "task-1775727" [ 1450.880896] env[62508]: _type = "Task" [ 1450.880896] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.892869] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775727, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.020215] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775721, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544242} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.023519] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 63fca45d-5922-4a14-9936-30070c349f8e/63fca45d-5922-4a14-9936-30070c349f8e.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1451.023800] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1451.024371] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-998b78d6-4612-4e9b-80e0-d58ff786368f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.034632] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1451.034632] env[62508]: value = "task-1775729" [ 1451.034632] env[62508]: _type = "Task" [ 1451.034632] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.054911] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775729, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.071190] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b19c5815-ce33-4e0c-a57c-38d34b677a2b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.081018] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c4d8a02-5f97-4dcc-ba8d-854169cc256f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.128598] env[62508]: DEBUG oslo_concurrency.lockutils [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Releasing lock "refresh_cache-7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1451.131989] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1397d11c-264f-4023-99d8-e3009004f3b2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.143924] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c68ad45-cd5f-44c1-8da4-4faf7a4fb65b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.167555] env[62508]: DEBUG nova.compute.provider_tree [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1451.183792] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775725, 'name': CreateVM_Task, 'duration_secs': 0.473647} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.184060] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1451.184811] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1451.185022] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1451.185394] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1451.185693] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54d993fa-c421-47e4-a9c1-3d3619274614 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.189903] env[62508]: DEBUG nova.compute.manager [req-7bde30ce-94c1-43ff-a485-7aed3309ebf3 req-8d0d2595-a93b-414f-8a14-900f91103833 service nova] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Received event network-vif-deleted-49925acf-7ad5-4349-bc32-67f5ba20e54d {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1451.190160] env[62508]: INFO nova.compute.manager [req-7bde30ce-94c1-43ff-a485-7aed3309ebf3 req-8d0d2595-a93b-414f-8a14-900f91103833 service nova] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Neutron deleted interface 49925acf-7ad5-4349-bc32-67f5ba20e54d; detaching it from the instance and deleting it from the info cache [ 1451.190356] env[62508]: DEBUG nova.network.neutron [req-7bde30ce-94c1-43ff-a485-7aed3309ebf3 req-8d0d2595-a93b-414f-8a14-900f91103833 service nova] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.196470] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1451.196470] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52fd11c1-0f78-a802-20bd-cb9598f3dd42" [ 1451.196470] env[62508]: _type = "Task" [ 1451.196470] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.208444] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52fd11c1-0f78-a802-20bd-cb9598f3dd42, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.255059] env[62508]: DEBUG oslo_vmware.api [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Task: {'id': task-1775726, 'name': ReconfigVM_Task, 'duration_secs': 0.198843} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.255511] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368655', 'volume_id': 'baa10618-33f6-4bdd-877d-bdda20c03e84', 'name': 'volume-baa10618-33f6-4bdd-877d-bdda20c03e84', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '7d23d8f0-d7a9-4236-ad28-208e77b72138', 'attached_at': '', 'detached_at': '', 'volume_id': 'baa10618-33f6-4bdd-877d-bdda20c03e84', 'serial': 'baa10618-33f6-4bdd-877d-bdda20c03e84'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1451.270183] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b4e57e3d-cb1a-4574-9641-67a344d9f7de tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1451.270183] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b4e57e3d-cb1a-4574-9641-67a344d9f7de tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1451.270183] env[62508]: DEBUG nova.network.neutron [None req-b4e57e3d-cb1a-4574-9641-67a344d9f7de tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1451.270183] env[62508]: DEBUG nova.objects.instance [None req-b4e57e3d-cb1a-4574-9641-67a344d9f7de tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lazy-loading 'info_cache' on Instance uuid 95a289ac-3178-45ea-80d2-905b9af54f3c {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1451.329259] env[62508]: DEBUG oslo_vmware.api [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775722, 'name': CloneVM_Task} 
progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.335077] env[62508]: DEBUG nova.network.neutron [-] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.391920] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775727, 'name': PowerOffVM_Task, 'duration_secs': 0.417558} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.392299] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1451.392539] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1451.393389] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-518580c5-28dd-49d9-a2b3-e2930d579ec8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.402307] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1451.404351] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b32041a3-3af4-477e-9ac7-ecbfbfc46253 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.548724] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775729, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.113369} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.549053] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1451.549883] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78351f6c-44d6-4cc3-8e9a-654d962f6a46 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.577289] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 63fca45d-5922-4a14-9936-30070c349f8e/63fca45d-5922-4a14-9936-30070c349f8e.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1451.577693] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a507d54b-2249-4e3d-97d5-5babd6adf913 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.595550] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1451.595550] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1451.595550] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleting the datastore file [datastore1] 38d294a9-2f51-438d-b942-a88e380a981f {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1451.600287] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8314de6e-c204-4714-bfc9-9d95bddea3df {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.603096] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1451.603096] env[62508]: value = "task-1775731" [ 1451.603096] env[62508]: _type = "Task" [ 1451.603096] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.608975] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1451.608975] env[62508]: value = "task-1775732" [ 1451.608975] env[62508]: _type = "Task" [ 1451.608975] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.612719] env[62508]: DEBUG nova.network.neutron [req-1e7d5404-e0ce-499c-9f2d-cbd49a22f888 req-f494b20d-2639-4345-8bc5-719fae50c372 service nova] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Updated VIF entry in instance network info cache for port b63a3fdb-4813-40cc-8ee8-6478ef3e9640. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1451.612719] env[62508]: DEBUG nova.network.neutron [req-1e7d5404-e0ce-499c-9f2d-cbd49a22f888 req-f494b20d-2639-4345-8bc5-719fae50c372 service nova] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Updating instance_info_cache with network_info: [{"id": "b63a3fdb-4813-40cc-8ee8-6478ef3e9640", "address": "fa:16:3e:2e:81:ac", "network": {"id": "73c15752-ef12-4a06-b340-8848b11c86c2", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-836710224-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1965e796bcbd44a1be5a9c1b50698c0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb63a3fdb-48", "ovs_interfaceid": "b63a3fdb-4813-40cc-8ee8-6478ef3e9640", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.620085] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775731, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.626708] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775732, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.673687] env[62508]: DEBUG nova.scheduler.client.report [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1451.681641] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1451.682158] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-071b60b0-16d9-4689-8f93-63160694fe48 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.695279] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5936eb7-759a-4fc8-b686-67319d70fcc0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.698148] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1451.698148] env[62508]: value = "task-1775733" [ 1451.698148] env[62508]: _type = "Task" [ 1451.698148] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.711843] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d39527d-d9ac-43aa-bd32-6c4277539fcc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.737719] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775733, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.737719] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52fd11c1-0f78-a802-20bd-cb9598f3dd42, 'name': SearchDatastore_Task, 'duration_secs': 0.014597} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.737719] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1451.737719] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1451.737719] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1451.737719] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1451.737719] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1451.737719] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ba5c65d-4e0c-425c-91f4-f38795fae78d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.749541] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1451.749903] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1451.768038] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7278fef8-f9c1-4e7d-9d8c-4e90e53f18f6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.773951] env[62508]: DEBUG nova.compute.manager [req-7bde30ce-94c1-43ff-a485-7aed3309ebf3 req-8d0d2595-a93b-414f-8a14-900f91103833 service nova] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Detach interface failed, port_id=49925acf-7ad5-4349-bc32-67f5ba20e54d, reason: Instance 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1451.776070] env[62508]: DEBUG nova.objects.base [None req-b4e57e3d-cb1a-4574-9641-67a344d9f7de tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Object Instance<95a289ac-3178-45ea-80d2-905b9af54f3c> lazy-loaded attributes: flavor,info_cache {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1451.783054] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1451.783054] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b78984-622c-235e-97b2-53341c358d81" [ 1451.783054] env[62508]: _type = "Task" [ 1451.783054] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.793106] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b78984-622c-235e-97b2-53341c358d81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.831574] env[62508]: DEBUG oslo_vmware.api [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775722, 'name': CloneVM_Task} progress is 95%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.836618] env[62508]: INFO nova.compute.manager [-] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Took 1.32 seconds to deallocate network for instance. [ 1452.119490] env[62508]: DEBUG oslo_concurrency.lockutils [req-1e7d5404-e0ce-499c-9f2d-cbd49a22f888 req-f494b20d-2639-4345-8bc5-719fae50c372 service nova] Releasing lock "refresh_cache-9a3ef326-0fbf-4fd2-bb5e-3009bf661381" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.120173] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775731, 'name': ReconfigVM_Task, 'duration_secs': 0.390929} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.124159] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 63fca45d-5922-4a14-9936-30070c349f8e/63fca45d-5922-4a14-9936-30070c349f8e.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1452.124867] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-049711b0-6156-40d7-a453-408f80ed1311 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.132517] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775732, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160709} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.133933] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1452.134161] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1452.134358] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1452.137278] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1452.137278] env[62508]: value = "task-1775734" [ 1452.137278] env[62508]: _type = "Task" [ 1452.137278] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.147941] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775734, 'name': Rename_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.182380] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.660s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.182990] env[62508]: DEBUG nova.compute.manager [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1452.185664] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.007s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.188000] env[62508]: INFO nova.compute.claims [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1452.209017] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775733, 'name': PowerOffVM_Task, 'duration_secs': 0.194468} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.209927] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1452.211150] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-531e3c32-44f7-4f32-86f1-5c955a4afff6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.233410] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef22aea-f71e-439c-a50c-a4060dcb97c5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.267150] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1452.267490] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f5ebff3-eba7-49c7-b6bf-344ae3492ba8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.276112] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1452.276112] env[62508]: value = "task-1775735" [ 1452.276112] env[62508]: _type = "Task" [ 1452.276112] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.289474] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] VM already powered off {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1452.289751] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1452.289998] env[62508]: DEBUG oslo_concurrency.lockutils [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.294713] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b78984-622c-235e-97b2-53341c358d81, 'name': SearchDatastore_Task, 'duration_secs': 0.010166} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.295530] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2412eba-8766-4547-bb78-02e0006c94b7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.302325] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1452.302325] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52185fae-3d07-b94e-5ad1-91e08b20c0fd" [ 1452.302325] env[62508]: _type = "Task" [ 1452.302325] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.311597] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52185fae-3d07-b94e-5ad1-91e08b20c0fd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.320778] env[62508]: DEBUG nova.objects.instance [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Lazy-loading 'flavor' on Instance uuid 7d23d8f0-d7a9-4236-ad28-208e77b72138 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1452.334835] env[62508]: DEBUG oslo_vmware.api [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775722, 'name': CloneVM_Task, 'duration_secs': 1.86695} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.334948] env[62508]: INFO nova.virt.vmwareapi.vmops [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Created linked-clone VM from snapshot [ 1452.335799] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9bf918-17e7-4c01-8099-21ff6c6f05b6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.344551] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Uploading image 8584a906-3096-4f94-aa62-d77ab0fc45bb {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1452.349591] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.402582] env[62508]: DEBUG oslo_vmware.rw_handles [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1452.402582] env[62508]: value = "vm-368660" [ 1452.402582] env[62508]: _type = "VirtualMachine" [ 1452.402582] env[62508]: }. 
{{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1452.405315] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-581bf1dd-ee44-42bb-b27b-30a7c0feb396 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.413207] env[62508]: DEBUG oslo_vmware.rw_handles [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Lease: (returnval){ [ 1452.413207] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f8e7f8-738f-7390-9285-e6fdf6e0d646" [ 1452.413207] env[62508]: _type = "HttpNfcLease" [ 1452.413207] env[62508]: } obtained for exporting VM: (result){ [ 1452.413207] env[62508]: value = "vm-368660" [ 1452.413207] env[62508]: _type = "VirtualMachine" [ 1452.413207] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1452.413513] env[62508]: DEBUG oslo_vmware.api [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for the lease: (returnval){ [ 1452.413513] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f8e7f8-738f-7390-9285-e6fdf6e0d646" [ 1452.413513] env[62508]: _type = "HttpNfcLease" [ 1452.413513] env[62508]: } to be ready. {{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1452.420654] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1452.420654] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f8e7f8-738f-7390-9285-e6fdf6e0d646" [ 1452.420654] env[62508]: _type = "HttpNfcLease" [ 1452.420654] env[62508]: } is initializing. 
{{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1452.556228] env[62508]: DEBUG nova.network.neutron [None req-b4e57e3d-cb1a-4574-9641-67a344d9f7de tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating instance_info_cache with network_info: [{"id": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "address": "fa:16:3e:f8:bf:1b", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9e88907-91", "ovs_interfaceid": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1452.667823] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775734, 'name': Rename_Task, 'duration_secs': 0.156756} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.667823] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1452.667823] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45a5b68b-794f-4c6f-b2bf-9df2070f099a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.673250] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1452.673250] env[62508]: value = "task-1775737" [ 1452.673250] env[62508]: _type = "Task" [ 1452.673250] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.685422] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775737, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.692336] env[62508]: DEBUG nova.compute.utils [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1452.698112] env[62508]: DEBUG nova.compute.manager [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1452.698112] env[62508]: DEBUG nova.network.neutron [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1452.770132] env[62508]: DEBUG nova.policy [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1bebc8625d4842c3b630da112442bcbb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf3fdb58653a47149b5ae7316424d235', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1452.815408] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52185fae-3d07-b94e-5ad1-91e08b20c0fd, 'name': SearchDatastore_Task, 'duration_secs': 0.010812} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.815753] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.816026] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 9a3ef326-0fbf-4fd2-bb5e-3009bf661381/9a3ef326-0fbf-4fd2-bb5e-3009bf661381.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1452.816316] env[62508]: DEBUG oslo_concurrency.lockutils [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.816501] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1452.816725] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce4fabed-2cbb-4b9e-94c4-37520d67551f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.820115] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ce9b006-f42d-4387-9158-025c3710d23f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.833970] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1452.833970] env[62508]: value = "task-1775738" [ 1452.833970] env[62508]: _type = "Task" [ 1452.833970] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.834282] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1452.834282] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1452.835571] env[62508]: DEBUG oslo_concurrency.lockutils [None req-140c6c7e-b865-4d26-94d0-3085bcee1438 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Lock "7d23d8f0-d7a9-4236-ad28-208e77b72138" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.966s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.836829] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd0b56cb-13b8-432c-ad48-faa26c54018f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.848182] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1452.848182] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524ed74f-f175-108e-799b-367dea723ac3" [ 1452.848182] env[62508]: _type = "Task" [ 1452.848182] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.851566] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775738, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.864174] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524ed74f-f175-108e-799b-367dea723ac3, 'name': SearchDatastore_Task, 'duration_secs': 0.010043} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.864964] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7aa0d03a-1c44-48f1-9679-b9604b732b30 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.873100] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1452.873100] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a2844e-f18c-329e-2f13-a7c7ec830ad9" [ 1452.873100] env[62508]: _type = "Task" [ 1452.873100] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.885202] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a2844e-f18c-329e-2f13-a7c7ec830ad9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.923611] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1452.923611] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f8e7f8-738f-7390-9285-e6fdf6e0d646" [ 1452.923611] env[62508]: _type = "HttpNfcLease" [ 1452.923611] env[62508]: } is ready. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1452.924030] env[62508]: DEBUG oslo_vmware.rw_handles [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1452.924030] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f8e7f8-738f-7390-9285-e6fdf6e0d646" [ 1452.924030] env[62508]: _type = "HttpNfcLease" [ 1452.924030] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1452.925783] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f6257f-58d8-42bf-9d99-b8b72d8702fd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.935032] env[62508]: DEBUG oslo_vmware.rw_handles [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52679c17-2be1-01fe-32d2-212772188e53/disk-0.vmdk from lease info. {{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1452.935251] env[62508]: DEBUG oslo_vmware.rw_handles [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52679c17-2be1-01fe-32d2-212772188e53/disk-0.vmdk for reading. 
{{(pid=62508) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1453.062576] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b4e57e3d-cb1a-4574-9641-67a344d9f7de tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1453.067679] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f41e5d23-413d-4f41-a2ae-e30f5453fff1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.186278] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775737, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.188136] env[62508]: DEBUG nova.network.neutron [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Successfully created port: cafd5648-99e8-4c28-92bb-439b1d656b15 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1453.204233] env[62508]: DEBUG nova.compute.manager [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1453.334774] env[62508]: DEBUG nova.virt.hardware [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1453.335127] env[62508]: DEBUG nova.virt.hardware [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1453.335756] env[62508]: DEBUG nova.virt.hardware [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1453.336063] env[62508]: DEBUG nova.virt.hardware [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1453.336521] env[62508]: DEBUG nova.virt.hardware [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1453.337265] env[62508]: DEBUG nova.virt.hardware [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1453.337550] env[62508]: DEBUG nova.virt.hardware [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1453.337793] env[62508]: DEBUG nova.virt.hardware [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1453.337963] 
env[62508]: DEBUG nova.virt.hardware [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1453.340187] env[62508]: DEBUG nova.virt.hardware [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1453.340407] env[62508]: DEBUG nova.virt.hardware [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1453.342098] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-334b8218-6640-4ad0-beb9-6cdca22b0f4f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.361536] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c18d4e6-6a51-4ee1-88f3-7565acc3a876 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.365284] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775738, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.377947] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:95:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bf83eb47-d009-45ec-9583-6e3d46a6f0f8', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1453.385584] env[62508]: DEBUG oslo.service.loopingcall [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1453.392111] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1453.392687] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-619f3b0c-84f9-4c58-8d5e-951ba47dfa68 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.419759] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1453.419759] env[62508]: value = "task-1775740" [ 1453.419759] env[62508]: _type = "Task" [ 1453.419759] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.419759] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a2844e-f18c-329e-2f13-a7c7ec830ad9, 'name': SearchDatastore_Task, 'duration_secs': 0.011125} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.419759] env[62508]: DEBUG oslo_concurrency.lockutils [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1453.420332] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk. {{(pid=62508) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1453.424509] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-693e1402-f63e-4948-a01f-3e473c8b21a8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.437308] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775740, 'name': CreateVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.439706] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1453.439706] env[62508]: value = "task-1775741" [ 1453.439706] env[62508]: _type = "Task" [ 1453.439706] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.454119] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775741, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.568839] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4e57e3d-cb1a-4574-9641-67a344d9f7de tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1453.569221] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-39eeeb96-2de7-443d-affa-ac94f584a82a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.586294] env[62508]: DEBUG oslo_vmware.api [None req-b4e57e3d-cb1a-4574-9641-67a344d9f7de tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1453.586294] env[62508]: value = "task-1775742" [ 1453.586294] env[62508]: _type = "Task" [ 1453.586294] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.599080] env[62508]: DEBUG oslo_vmware.api [None req-b4e57e3d-cb1a-4574-9641-67a344d9f7de tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1775742, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.691585] env[62508]: DEBUG oslo_vmware.api [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775737, 'name': PowerOnVM_Task, 'duration_secs': 0.845013} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.691961] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1453.692634] env[62508]: INFO nova.compute.manager [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Took 9.03 seconds to spawn the instance on the hypervisor. 
[ 1453.692728] env[62508]: DEBUG nova.compute.manager [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1453.694218] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b77e1e0-cc30-4eb7-88d3-d04bbd642999 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.853194] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775738, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529566} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.856401] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 9a3ef326-0fbf-4fd2-bb5e-3009bf661381/9a3ef326-0fbf-4fd2-bb5e-3009bf661381.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1453.856711] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1453.857384] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6edc4e7c-89f6-4d0d-bc00-a02df519186a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.867299] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1453.867299] env[62508]: value = "task-1775743" [ 1453.867299] env[62508]: _type = "Task" [ 1453.867299] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.881153] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775743, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.929348] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2b773c-5fdd-4ea7-bd27-ed7d9bcfbdd3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.938219] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775740, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.950319] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80238645-3ca9-4805-b35d-b3adbff2240f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.964159] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775741, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510364} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.967112] env[62508]: INFO nova.virt.vmwareapi.ds_util [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk. [ 1453.968342] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d356b5b4-d942-49f5-b8a1-4bdc5ca09f83 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.004429] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4589006-5617-4d39-999d-30236c7f4c68 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.032124] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1454.033623] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7dd7596-81f5-44df-b481-09ff1542719e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.055337] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f0961e-151e-4c3e-9b0b-c5e8c56d6306 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.061387] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1454.061387] env[62508]: value = "task-1775744" [ 1454.061387] env[62508]: _type = "Task" [ 1454.061387] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.075846] env[62508]: DEBUG nova.compute.provider_tree [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1454.086656] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775744, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.088335] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dddcf292-a86d-4261-831e-e1cd93498521 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Acquiring lock "7d23d8f0-d7a9-4236-ad28-208e77b72138" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1454.088656] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dddcf292-a86d-4261-831e-e1cd93498521 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Lock "7d23d8f0-d7a9-4236-ad28-208e77b72138" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1454.104635] env[62508]: DEBUG oslo_vmware.api [None req-b4e57e3d-cb1a-4574-9641-67a344d9f7de tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1775742, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.219219] env[62508]: DEBUG nova.compute.manager [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1454.232013] env[62508]: INFO nova.compute.manager [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Took 44.93 seconds to build instance. [ 1454.381058] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775743, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.113326} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.381805] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1454.383052] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f6f987-2513-433d-ab77-56e4ce08b19e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.411044] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 9a3ef326-0fbf-4fd2-bb5e-3009bf661381/9a3ef326-0fbf-4fd2-bb5e-3009bf661381.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1454.411615] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ff8d9b6-5095-4099-8756-024f949211d9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.438343] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775740, 'name': CreateVM_Task, 'duration_secs': 0.747044} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.440147] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1454.440588] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1454.440588] env[62508]: value = "task-1775745" [ 1454.440588] env[62508]: _type = "Task" [ 1454.440588] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.441339] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1454.441554] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1454.441905] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1454.442322] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46765711-16d5-4ac4-972a-3f8fa3446995 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.453742] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1454.453742] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5222b25a-0164-37f4-3495-01dabeec59fe" [ 1454.453742] env[62508]: _type = "Task" [ 1454.453742] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.457953] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775745, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.468947] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5222b25a-0164-37f4-3495-01dabeec59fe, 'name': SearchDatastore_Task, 'duration_secs': 0.011862} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.469424] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1454.469752] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1454.470111] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1454.470352] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1454.470629] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1454.471032] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3f781787-347e-4f9d-be87-df4e5099dba2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.480268] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1454.480466] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1454.481217] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-692878a5-3501-4908-83fc-d19e620030b0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.487572] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1454.487572] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ab92f8-553a-8c6c-2df8-616bc90d89fe" [ 1454.487572] env[62508]: _type = "Task" [ 1454.487572] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.497273] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ab92f8-553a-8c6c-2df8-616bc90d89fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.576252] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775744, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.580320] env[62508]: DEBUG nova.scheduler.client.report [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1454.597366] env[62508]: INFO nova.compute.manager [None req-dddcf292-a86d-4261-831e-e1cd93498521 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Detaching volume baa10618-33f6-4bdd-877d-bdda20c03e84 [ 1454.603678] env[62508]: DEBUG oslo_vmware.api [None req-b4e57e3d-cb1a-4574-9641-67a344d9f7de tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1775742, 'name': PowerOnVM_Task, 'duration_secs': 0.575381} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.606518] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4e57e3d-cb1a-4574-9641-67a344d9f7de tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1454.606810] env[62508]: DEBUG nova.compute.manager [None req-b4e57e3d-cb1a-4574-9641-67a344d9f7de tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1454.608196] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e3df26-498a-4ade-98d9-d871a496c119 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.639110] env[62508]: INFO nova.virt.block_device [None req-dddcf292-a86d-4261-831e-e1cd93498521 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Attempting to driver detach volume baa10618-33f6-4bdd-877d-bdda20c03e84 from mountpoint /dev/sdb [ 1454.639448] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-dddcf292-a86d-4261-831e-e1cd93498521 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Volume detach. Driver type: vmdk {{(pid=62508) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1454.639730] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-dddcf292-a86d-4261-831e-e1cd93498521 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368655', 'volume_id': 'baa10618-33f6-4bdd-877d-bdda20c03e84', 'name': 'volume-baa10618-33f6-4bdd-877d-bdda20c03e84', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '7d23d8f0-d7a9-4236-ad28-208e77b72138', 'attached_at': '', 'detached_at': '', 'volume_id': 'baa10618-33f6-4bdd-877d-bdda20c03e84', 'serial': 'baa10618-33f6-4bdd-877d-bdda20c03e84'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1454.640710] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1211c452-0e1d-4cee-8108-45cfc7958194 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.666888] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2731e7de-f556-496b-ac1f-1e90b6bcf413 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.679189] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b991742d-3e75-42df-8c37-b28adc261e45 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.702438] env[62508]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a62330-6028-4dd0-9f9c-a851620136cf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.720543] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-dddcf292-a86d-4261-831e-e1cd93498521 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] The volume has not been displaced from its original location: [datastore1] volume-baa10618-33f6-4bdd-877d-bdda20c03e84/volume-baa10618-33f6-4bdd-877d-bdda20c03e84.vmdk. No consolidation needed. {{(pid=62508) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1454.726434] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-dddcf292-a86d-4261-831e-e1cd93498521 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Reconfiguring VM instance instance-00000010 to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1454.726727] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-034b45e0-787b-4b28-9266-3fded6ad01cb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.740566] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ae193b83-a06d-485f-bd01-7e747880f7dc tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lock "63fca45d-5922-4a14-9936-30070c349f8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.111s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1454.748969] env[62508]: DEBUG oslo_vmware.api [None req-dddcf292-a86d-4261-831e-e1cd93498521 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Waiting for the task: (returnval){ [ 1454.748969] env[62508]: value = "task-1775746" [ 1454.748969] env[62508]: _type = "Task" [ 1454.748969] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.760083] env[62508]: DEBUG oslo_vmware.api [None req-dddcf292-a86d-4261-831e-e1cd93498521 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Task: {'id': task-1775746, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.953117] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775745, 'name': ReconfigVM_Task, 'duration_secs': 0.329909} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.953547] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 9a3ef326-0fbf-4fd2-bb5e-3009bf661381/9a3ef326-0fbf-4fd2-bb5e-3009bf661381.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1454.954306] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-724fe2c1-7475-4393-9cde-3f6e108106f1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.962755] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1454.962755] env[62508]: value = "task-1775747" [ 1454.962755] env[62508]: _type = "Task" [ 1454.962755] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.973987] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775747, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.998744] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ab92f8-553a-8c6c-2df8-616bc90d89fe, 'name': SearchDatastore_Task, 'duration_secs': 0.01017} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.999996] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-229748ec-fc61-426b-877e-03a499bb1c06 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.006804] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1455.006804] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b3418c-700f-b7f7-d75a-6e8c7fe059ab" [ 1455.006804] env[62508]: _type = "Task" [ 1455.006804] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.016319] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b3418c-700f-b7f7-d75a-6e8c7fe059ab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.064742] env[62508]: DEBUG nova.network.neutron [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Successfully updated port: cafd5648-99e8-4c28-92bb-439b1d656b15 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1455.080127] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775744, 'name': ReconfigVM_Task, 'duration_secs': 0.707293} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.081407] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Reconfigured VM instance instance-00000023 to attach disk [datastore1] 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1455.082754] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0517f429-f712-4454-b500-f669012321d2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.087817] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.902s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1455.088453] env[62508]: DEBUG nova.compute.manager [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1455.092296] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.788s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1455.094444] env[62508]: INFO nova.compute.claims [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1455.137065] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7743671c-b83c-44fe-84c4-73da6d5da19c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.155418] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1455.155418] env[62508]: value = "task-1775748" [ 1455.155418] env[62508]: _type = "Task" [ 1455.155418] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.164833] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775748, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.243134] env[62508]: DEBUG nova.compute.manager [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1455.260505] env[62508]: DEBUG oslo_vmware.api [None req-dddcf292-a86d-4261-831e-e1cd93498521 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Task: {'id': task-1775746, 'name': ReconfigVM_Task, 'duration_secs': 0.401057} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.260664] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-dddcf292-a86d-4261-831e-e1cd93498521 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Reconfigured VM instance instance-00000010 to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1455.265617] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c504c4c-3517-4169-b378-1bbb0284bcb6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.283015] env[62508]: DEBUG oslo_vmware.api [None req-dddcf292-a86d-4261-831e-e1cd93498521 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Waiting for the task: (returnval){ [ 1455.283015] env[62508]: value = "task-1775749" [ 1455.283015] env[62508]: _type = "Task" [ 1455.283015] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.293293] env[62508]: DEBUG oslo_vmware.api [None req-dddcf292-a86d-4261-831e-e1cd93498521 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Task: {'id': task-1775749, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.474312] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775747, 'name': Rename_Task, 'duration_secs': 0.180677} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.474743] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1455.475118] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d61aa475-c44c-4d98-a7d1-3c3a1598f48f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.483166] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1455.483166] env[62508]: value = "task-1775750" [ 1455.483166] env[62508]: _type = "Task" [ 1455.483166] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.493387] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775750, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.517733] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b3418c-700f-b7f7-d75a-6e8c7fe059ab, 'name': SearchDatastore_Task, 'duration_secs': 0.010004} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.517980] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1455.518376] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 38d294a9-2f51-438d-b942-a88e380a981f/38d294a9-2f51-438d-b942-a88e380a981f.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1455.518764] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8432e222-32a4-42ad-b7f1-5b667a1b5859 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.527201] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1455.527201] env[62508]: value = "task-1775751" [ 1455.527201] env[62508]: _type = "Task" [ 1455.527201] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.537076] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775751, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.572733] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "refresh_cache-e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1455.572966] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquired lock "refresh_cache-e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1455.573151] env[62508]: DEBUG nova.network.neutron [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1455.601617] env[62508]: DEBUG nova.compute.utils [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1455.605248] env[62508]: DEBUG nova.compute.manager [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1455.605453] env[62508]: DEBUG nova.network.neutron [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1455.667098] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775748, 'name': ReconfigVM_Task, 'duration_secs': 0.268704} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.667476] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1455.667851] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4dd77ce0-4312-4969-93dd-acb3d4c6d561 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.675281] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1455.675281] env[62508]: value = "task-1775752" [ 1455.675281] env[62508]: _type = "Task" [ 1455.675281] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.680377] env[62508]: DEBUG nova.policy [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7de47c22ac8743b7b733dbb97b5bf42d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df92e9d833ff45b99be76310e6bda526', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1455.687938] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775752, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.771823] env[62508]: DEBUG oslo_concurrency.lockutils [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1455.796091] env[62508]: DEBUG oslo_vmware.api [None req-dddcf292-a86d-4261-831e-e1cd93498521 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Task: {'id': task-1775749, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.999258] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775750, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.044308] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775751, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.105949] env[62508]: DEBUG nova.compute.manager [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1456.132370] env[62508]: DEBUG nova.network.neutron [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1456.136330] env[62508]: DEBUG nova.network.neutron [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Successfully created port: e9750a97-050e-4f74-b663-2e63804efb6f {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1456.191337] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775752, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.299915] env[62508]: DEBUG oslo_vmware.api [None req-dddcf292-a86d-4261-831e-e1cd93498521 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Task: {'id': task-1775749, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.413885] env[62508]: DEBUG nova.network.neutron [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Updating instance_info_cache with network_info: [{"id": "cafd5648-99e8-4c28-92bb-439b1d656b15", "address": "fa:16:3e:79:5e:f6", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcafd5648-99", "ovs_interfaceid": "cafd5648-99e8-4c28-92bb-439b1d656b15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1456.462065] env[62508]: DEBUG nova.virt.hardware [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:10:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='192acf26-1962-4b3a-b461-037bee820f70',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1531657240',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1456.462291] env[62508]: DEBUG nova.virt.hardware [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1456.462482] env[62508]: DEBUG nova.virt.hardware [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1456.463130] env[62508]: DEBUG nova.virt.hardware [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 
tempest-MigrationsAdminTest-1752776070-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1456.463130] env[62508]: DEBUG nova.virt.hardware [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1456.463130] env[62508]: DEBUG nova.virt.hardware [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1456.463430] env[62508]: DEBUG nova.virt.hardware [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1456.463430] env[62508]: DEBUG nova.virt.hardware [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1456.463610] env[62508]: DEBUG nova.virt.hardware [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1456.464340] env[62508]: DEBUG nova.virt.hardware [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1456.464340] env[62508]: DEBUG nova.virt.hardware [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1456.466463] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-522e8765-8474-45dd-81af-083fd5212ee9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.478044] env[62508]: DEBUG oslo_vmware.rw_handles [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f28599-9b69-7aca-a0ed-f97ab833234b/disk-0.vmdk. 
{{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1456.479533] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d46a8e-31c6-4bee-8c67-0620621eb33d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.489961] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f764bf9a-b9ee-4695-b9b7-407e2bbfa06e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.501206] env[62508]: DEBUG oslo_vmware.rw_handles [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f28599-9b69-7aca-a0ed-f97ab833234b/disk-0.vmdk is in state: ready. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1456.502845] env[62508]: ERROR oslo_vmware.rw_handles [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f28599-9b69-7aca-a0ed-f97ab833234b/disk-0.vmdk due to incomplete transfer. [ 1456.502845] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-375b439b-4d4a-492c-8e14-abe994f2451c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.525873] env[62508]: DEBUG oslo_vmware.api [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775750, 'name': PowerOnVM_Task, 'duration_secs': 0.824193} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.527415] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1456.527564] env[62508]: INFO nova.compute.manager [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Took 9.45 seconds to spawn the instance on the hypervisor. [ 1456.527764] env[62508]: DEBUG nova.compute.manager [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1456.528075] env[62508]: DEBUG oslo_vmware.rw_handles [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f28599-9b69-7aca-a0ed-f97ab833234b/disk-0.vmdk. 
{{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1456.528269] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Uploaded image d5779115-b6af-4def-baed-8e5bbe416621 to the Glance image server {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1456.530406] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Destroying the VM {{(pid=62508) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1456.531266] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d62e5d6c-4a40-4601-bd09-f9b40ceada96 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.533905] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-309c687a-29d6-43df-a60c-172d17cc94fd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.548458] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775751, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.551437} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.551107] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 38d294a9-2f51-438d-b942-a88e380a981f/38d294a9-2f51-438d-b942-a88e380a981f.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1456.551372] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1456.551706] env[62508]: DEBUG oslo_vmware.api [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1456.551706] env[62508]: value = "task-1775753" [ 1456.551706] env[62508]: _type = "Task" [ 1456.551706] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.557511] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f464d31e-e12f-4492-9254-4de6f8cc88df {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.571067] env[62508]: DEBUG oslo_vmware.api [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775753, 'name': Destroy_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.575100] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1456.575100] env[62508]: value = "task-1775754" [ 1456.575100] env[62508]: _type = "Task" [ 1456.575100] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.587965] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e5c55d-1f68-4662-80bb-51c6710c92ae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.598225] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a67a1969-b61f-4125-9666-e2999f572f85 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.636437] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c504c0-e168-4f4d-a295-9edaaa40c9e1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.646767] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41cc0f9-834b-4964-a1eb-2bf86c718c05 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.664286] env[62508]: DEBUG nova.compute.provider_tree [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1456.690244] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775752, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.798880] env[62508]: DEBUG oslo_vmware.api [None req-dddcf292-a86d-4261-831e-e1cd93498521 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Task: {'id': task-1775749, 'name': ReconfigVM_Task, 'duration_secs': 1.193209} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.799210] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-dddcf292-a86d-4261-831e-e1cd93498521 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368655', 'volume_id': 'baa10618-33f6-4bdd-877d-bdda20c03e84', 'name': 'volume-baa10618-33f6-4bdd-877d-bdda20c03e84', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '7d23d8f0-d7a9-4236-ad28-208e77b72138', 'attached_at': '', 'detached_at': '', 'volume_id': 'baa10618-33f6-4bdd-877d-bdda20c03e84', 'serial': 'baa10618-33f6-4bdd-877d-bdda20c03e84'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1456.916726] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Releasing lock "refresh_cache-e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1456.917185] env[62508]: DEBUG nova.compute.manager [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Instance network_info: |[{"id": "cafd5648-99e8-4c28-92bb-439b1d656b15", "address": "fa:16:3e:79:5e:f6", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcafd5648-99", "ovs_interfaceid": "cafd5648-99e8-4c28-92bb-439b1d656b15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1456.917663] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:5e:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cafd5648-99e8-4c28-92bb-439b1d656b15', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1456.925672] env[62508]: DEBUG oslo.service.loopingcall [None 
req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1456.925910] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1456.926154] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f39ccf5-38a2-4643-8a54-7ad998b408b9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.951185] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1456.951185] env[62508]: value = "task-1775755" [ 1456.951185] env[62508]: _type = "Task" [ 1456.951185] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.962932] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775755, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.070648] env[62508]: INFO nova.compute.manager [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Took 42.58 seconds to build instance. [ 1457.077516] env[62508]: DEBUG oslo_vmware.api [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775753, 'name': Destroy_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.086515] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775754, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.278058} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.087682] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1457.088829] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25015a3e-6cf8-4516-b71b-a219ec8972a5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.118873] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] 38d294a9-2f51-438d-b942-a88e380a981f/38d294a9-2f51-438d-b942-a88e380a981f.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1457.119605] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d811256-00d1-4d2f-889c-960380087d7a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.140937] env[62508]: DEBUG nova.compute.manager [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1457.145473] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1457.145473] env[62508]: value = "task-1775756" [ 1457.145473] env[62508]: _type = "Task" [ 1457.145473] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.157017] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775756, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.170286] env[62508]: DEBUG nova.virt.hardware [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1457.170587] env[62508]: DEBUG nova.virt.hardware [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1457.170759] env[62508]: DEBUG nova.virt.hardware [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1457.170927] env[62508]: DEBUG nova.virt.hardware [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1457.171089] env[62508]: DEBUG nova.virt.hardware [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1457.171249] env[62508]: DEBUG nova.virt.hardware [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1457.171485] env[62508]: DEBUG nova.virt.hardware [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1457.171651] env[62508]: DEBUG nova.virt.hardware [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1457.172062] env[62508]: DEBUG nova.virt.hardware [None 
req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1457.172344] env[62508]: DEBUG nova.virt.hardware [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1457.172528] env[62508]: DEBUG nova.virt.hardware [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1457.173481] env[62508]: DEBUG nova.scheduler.client.report [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1457.177771] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86fc5aa9-dfc2-4e52-8c13-15d98cd28ae2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.193390] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775752, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.197029] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b015af-9b25-4773-a074-2ec43ba4dd86 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.279215] env[62508]: DEBUG nova.compute.manager [req-266afd3d-5db5-47d8-9417-b7114dabee1f req-41287f4b-476a-4433-b3f1-87dd07021896 service nova] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Received event network-vif-plugged-cafd5648-99e8-4c28-92bb-439b1d656b15 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1457.279501] env[62508]: DEBUG oslo_concurrency.lockutils [req-266afd3d-5db5-47d8-9417-b7114dabee1f req-41287f4b-476a-4433-b3f1-87dd07021896 service nova] Acquiring lock "e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1457.279749] env[62508]: DEBUG oslo_concurrency.lockutils [req-266afd3d-5db5-47d8-9417-b7114dabee1f req-41287f4b-476a-4433-b3f1-87dd07021896 service nova] Lock "e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1457.279939] env[62508]: DEBUG oslo_concurrency.lockutils [req-266afd3d-5db5-47d8-9417-b7114dabee1f req-41287f4b-476a-4433-b3f1-87dd07021896 service nova] Lock "e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1457.280169] env[62508]: DEBUG nova.compute.manager [req-266afd3d-5db5-47d8-9417-b7114dabee1f req-41287f4b-476a-4433-b3f1-87dd07021896 service nova] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] No waiting events found dispatching network-vif-plugged-cafd5648-99e8-4c28-92bb-439b1d656b15 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1457.280360] env[62508]: WARNING nova.compute.manager [req-266afd3d-5db5-47d8-9417-b7114dabee1f req-41287f4b-476a-4433-b3f1-87dd07021896 service nova] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Received unexpected event network-vif-plugged-cafd5648-99e8-4c28-92bb-439b1d656b15 for instance with vm_state building and task_state spawning. [ 1457.360526] env[62508]: DEBUG nova.objects.instance [None req-dddcf292-a86d-4261-831e-e1cd93498521 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Lazy-loading 'flavor' on Instance uuid 7d23d8f0-d7a9-4236-ad28-208e77b72138 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1457.465190] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775755, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.571188] env[62508]: DEBUG oslo_vmware.api [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775753, 'name': Destroy_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.572818] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8a6fa968-6297-4151-b2d9-fddf81ad3d59 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "9a3ef326-0fbf-4fd2-bb5e-3009bf661381" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.926s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1457.660263] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775756, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.682922] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.590s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1457.682922] env[62508]: DEBUG nova.compute.manager [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1457.685691] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.317s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1457.687125] env[62508]: INFO nova.compute.claims [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1457.701413] env[62508]: DEBUG oslo_vmware.api [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775752, 'name': PowerOnVM_Task, 'duration_secs': 1.617401} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.701413] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1457.704301] env[62508]: DEBUG nova.compute.manager [None req-974446f9-c7ba-4e19-8558-41875d4cd0ef tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1457.705507] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605c6f6d-4e07-492c-ac9a-a7f29c1b8a0b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.929711] env[62508]: DEBUG nova.network.neutron [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Successfully updated port: e9750a97-050e-4f74-b663-2e63804efb6f {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1457.963809] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775755, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.073121] env[62508]: DEBUG oslo_vmware.api [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775753, 'name': Destroy_Task, 'duration_secs': 1.028002} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.073121] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Destroyed the VM [ 1458.073509] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Deleting Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1458.073509] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c350022c-2124-4b0d-a044-bd130d6fc504 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.082684] env[62508]: DEBUG oslo_vmware.api [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1458.082684] env[62508]: value = "task-1775757" [ 1458.082684] env[62508]: _type = "Task" [ 1458.082684] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.094693] env[62508]: DEBUG oslo_vmware.api [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775757, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.095873] env[62508]: INFO nova.compute.manager [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Rescuing [ 1458.096152] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "refresh_cache-63fca45d-5922-4a14-9936-30070c349f8e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1458.096305] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquired lock "refresh_cache-63fca45d-5922-4a14-9936-30070c349f8e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.096491] env[62508]: DEBUG nova.network.neutron [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1458.160552] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775756, 'name': ReconfigVM_Task, 'duration_secs': 0.55327} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.160813] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Reconfigured VM instance instance-00000028 to attach disk [datastore1] 38d294a9-2f51-438d-b942-a88e380a981f/38d294a9-2f51-438d-b942-a88e380a981f.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1458.161498] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-740e595f-9412-4278-83d2-1bdb6c92ed2c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.170534] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1458.170534] env[62508]: value = "task-1775758" [ 1458.170534] env[62508]: _type = "Task" [ 1458.170534] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.181259] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775758, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.197060] env[62508]: DEBUG nova.compute.utils [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1458.204023] env[62508]: DEBUG nova.compute.manager [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1458.204023] env[62508]: DEBUG nova.network.neutron [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1458.271596] env[62508]: DEBUG nova.policy [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7de47c22ac8743b7b733dbb97b5bf42d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df92e9d833ff45b99be76310e6bda526', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1458.369925] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dddcf292-a86d-4261-831e-e1cd93498521 tempest-VolumesAssistedSnapshotsTest-605138019 tempest-VolumesAssistedSnapshotsTest-605138019-project-admin] Lock "7d23d8f0-d7a9-4236-ad28-208e77b72138" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.281s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.419768] env[62508]: DEBUG oslo_concurrency.lockutils [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Acquiring lock "f307d4d5-e877-4d0a-951c-779c1d2e573b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.420014] env[62508]: DEBUG oslo_concurrency.lockutils [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Lock "f307d4d5-e877-4d0a-951c-779c1d2e573b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 
0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.438454] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquiring lock "refresh_cache-06baedda-2926-4ec8-a4f6-d62713f48a26" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1458.438647] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquired lock "refresh_cache-06baedda-2926-4ec8-a4f6-d62713f48a26" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.438803] env[62508]: DEBUG nova.network.neutron [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1458.466408] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775755, 'name': CreateVM_Task, 'duration_secs': 1.251184} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.466581] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1458.467306] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1458.467822] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.467822] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1458.468070] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be2e61ce-cced-4869-9069-49500fcc0f49 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.474832] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1458.474832] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e7a72f-e769-8a61-b25a-caf4edcb017b" [ 1458.474832] 
env[62508]: _type = "Task" [ 1458.474832] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.485348] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e7a72f-e769-8a61-b25a-caf4edcb017b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.583016] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "db74146d-abc3-4d48-be1b-6ad471794dbf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.583321] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "db74146d-abc3-4d48-be1b-6ad471794dbf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.583535] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "db74146d-abc3-4d48-be1b-6ad471794dbf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.583714] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "db74146d-abc3-4d48-be1b-6ad471794dbf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.583914] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "db74146d-abc3-4d48-be1b-6ad471794dbf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.589365] env[62508]: INFO nova.compute.manager [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Terminating instance [ 1458.591709] env[62508]: DEBUG nova.compute.manager [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1458.591709] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1458.592488] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02df294-a846-441d-8f8c-0e0e75a5bdb9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.598830] env[62508]: DEBUG oslo_vmware.api [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775757, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.615253] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1458.615606] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4b0a748-56a7-43ac-ad41-ff2cac3e161f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.684501] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775758, 'name': Rename_Task, 'duration_secs': 0.238888} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.684801] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1458.688019] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec5ceeee-1310-4697-9321-066b391e7a34 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.694162] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1458.694162] env[62508]: value = "task-1775760" [ 1458.694162] env[62508]: _type = "Task" [ 1458.694162] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.704118] env[62508]: DEBUG nova.compute.manager [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1458.714087] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775760, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.756718] env[62508]: DEBUG nova.network.neutron [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Successfully created port: 268ea73b-a264-453c-969d-59f58dd50192 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1458.922688] env[62508]: DEBUG nova.compute.manager [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1458.993194] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e7a72f-e769-8a61-b25a-caf4edcb017b, 'name': SearchDatastore_Task, 'duration_secs': 0.014958} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.996797] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1458.997124] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1458.997375] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1458.997524] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.997703] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Creating 
directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1458.999273] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0cd9867a-71a2-48a5-a7c8-72d6653e03ac {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.007923] env[62508]: DEBUG nova.network.neutron [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1459.021044] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1459.021256] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1459.026553] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ca9e0fc-2177-4146-8c9e-cc573a8a3adf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.032806] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1459.032806] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526152e3-78a7-300c-0968-0e03b43382c3" [ 1459.032806] env[62508]: _type = "Task" [ 1459.032806] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.047276] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526152e3-78a7-300c-0968-0e03b43382c3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.053639] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1459.053856] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1459.054153] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Deleting the datastore file [datastore1] db74146d-abc3-4d48-be1b-6ad471794dbf {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1459.054330] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29201ea5-b6fa-48ba-a6ae-9db6e31a2abe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.065878] env[62508]: DEBUG oslo_vmware.api [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1459.065878] env[62508]: value = "task-1775761" [ 1459.065878] env[62508]: _type = "Task" [ 1459.065878] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.071225] env[62508]: DEBUG nova.network.neutron [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Updating instance_info_cache with network_info: [{"id": "8fef6288-1754-4516-ae14-0dc489d4bb0f", "address": "fa:16:3e:bd:a3:de", "network": {"id": "60ac9aa7-8f09-4ba8-b8bd-545df0c62d98", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-289553303-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d58abb4cdcb74100b7c81076c7642b6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fef6288-17", "ovs_interfaceid": "8fef6288-1754-4516-ae14-0dc489d4bb0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1459.081232] env[62508]: DEBUG oslo_vmware.api [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775761, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.099716] env[62508]: DEBUG oslo_vmware.api [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775757, 'name': RemoveSnapshot_Task, 'duration_secs': 0.900903} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.100018] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Deleted Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1459.100269] env[62508]: INFO nova.compute.manager [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Took 15.67 seconds to snapshot the instance on the hypervisor. [ 1459.227024] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775760, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.299751] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe38c5f4-180e-4ad4-b98c-d43762a3be6a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.311305] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecbd4f43-9694-4167-86bc-0fb6ea81198e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.349273] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062a2d5d-c108-4720-86dd-ba14ac5ef968 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.360026] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9657be-976f-46a1-b067-10077ac0f034 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.376260] env[62508]: DEBUG nova.compute.provider_tree [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1459.388756] env[62508]: DEBUG nova.network.neutron [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Updating instance_info_cache with network_info: [{"id": "e9750a97-050e-4f74-b663-2e63804efb6f", "address": "fa:16:3e:c5:74:ba", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9750a97-05", "ovs_interfaceid": "e9750a97-050e-4f74-b663-2e63804efb6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1459.446126] env[62508]: DEBUG oslo_concurrency.lockutils [None 
req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.543111] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526152e3-78a7-300c-0968-0e03b43382c3, 'name': SearchDatastore_Task, 'duration_secs': 0.014067} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.543975] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af2c4204-22a3-4e4c-8a9c-d8aaedc67205 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.551149] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1459.551149] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524f2e52-8f14-e8de-5537-a7e748c4e31d" [ 1459.551149] env[62508]: _type = "Task" [ 1459.551149] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.561723] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524f2e52-8f14-e8de-5537-a7e748c4e31d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.575427] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Releasing lock "refresh_cache-63fca45d-5922-4a14-9936-30070c349f8e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1459.577592] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "e156aef5-bb56-4c17-9e7e-9419b672c9cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.577840] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "e156aef5-bb56-4c17-9e7e-9419b672c9cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.586872] env[62508]: DEBUG oslo_vmware.api [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775761, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.227981} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.587336] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1459.587512] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1459.587764] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1459.588081] env[62508]: INFO nova.compute.manager [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Took 1.00 seconds to destroy the instance on the hypervisor. [ 1459.588905] env[62508]: DEBUG oslo.service.loopingcall [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1459.589206] env[62508]: DEBUG nova.compute.manager [-] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1459.589364] env[62508]: DEBUG nova.network.neutron [-] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1459.608533] env[62508]: DEBUG nova.compute.manager [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Instance disappeared during snapshot {{(pid=62508) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4495}} [ 1459.619095] env[62508]: DEBUG nova.compute.manager [None req-403d3be9-3c24-45c7-a953-604fdc4bb986 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Image not found during clean up d5779115-b6af-4def-baed-8e5bbe416621 {{(pid=62508) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4501}} [ 1459.713259] env[62508]: DEBUG oslo_vmware.api [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775760, 'name': PowerOnVM_Task, 'duration_secs': 0.662531} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.714216] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1459.714216] env[62508]: DEBUG nova.compute.manager [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1459.717402] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6896a550-3b1a-4070-9f56-f84540f7f907 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.727020] env[62508]: DEBUG nova.compute.manager [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1459.761415] env[62508]: DEBUG nova.virt.hardware [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1459.761682] env[62508]: DEBUG nova.virt.hardware [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1459.761839] env[62508]: DEBUG nova.virt.hardware [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1459.762469] env[62508]: DEBUG nova.virt.hardware [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1459.762722] env[62508]: DEBUG nova.virt.hardware [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1459.762967] env[62508]: DEBUG nova.virt.hardware [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1459.763321] env[62508]: DEBUG nova.virt.hardware [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1459.763581] env[62508]: DEBUG nova.virt.hardware [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1459.763835] env[62508]: DEBUG nova.virt.hardware [None 
req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1459.764066] env[62508]: DEBUG nova.virt.hardware [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1459.764610] env[62508]: DEBUG nova.virt.hardware [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1459.765973] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96578111-278e-4bbe-a436-ae87f7a8e520 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.778137] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8900624-98db-4fab-9cbd-d6e271243528 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.887395] env[62508]: DEBUG nova.compute.manager [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Received event network-changed-cafd5648-99e8-4c28-92bb-439b1d656b15 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1459.887395] env[62508]: DEBUG nova.compute.manager [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Refreshing instance network info cache due to event network-changed-cafd5648-99e8-4c28-92bb-439b1d656b15. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1459.887395] env[62508]: DEBUG oslo_concurrency.lockutils [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] Acquiring lock "refresh_cache-e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.887832] env[62508]: DEBUG oslo_concurrency.lockutils [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] Acquired lock "refresh_cache-e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.887832] env[62508]: DEBUG nova.network.neutron [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Refreshing network info cache for port cafd5648-99e8-4c28-92bb-439b1d656b15 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1459.891726] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Releasing lock "refresh_cache-06baedda-2926-4ec8-a4f6-d62713f48a26" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1459.892025] env[62508]: DEBUG nova.compute.manager [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Instance network_info: |[{"id": "e9750a97-050e-4f74-b663-2e63804efb6f", "address": "fa:16:3e:c5:74:ba", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9750a97-05", "ovs_interfaceid": "e9750a97-050e-4f74-b663-2e63804efb6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1459.892710] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:74:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e9750a97-050e-4f74-b663-2e63804efb6f', 'vif_model': 'vmxnet3'}] 
{{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1459.902238] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Creating folder: Project (df92e9d833ff45b99be76310e6bda526). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1459.905938] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e3ad15d-e74d-4ddf-99db-33788c1f2e6f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.909778] env[62508]: ERROR nova.scheduler.client.report [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [req-42680ce2-fbd7-40be-aea5-cf6c1dab3bec] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-42680ce2-fbd7-40be-aea5-cf6c1dab3bec"}]} [ 1459.925162] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Created folder: Project (df92e9d833ff45b99be76310e6bda526) in parent group-v368536. [ 1459.925390] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Creating folder: Instances. Parent ref: group-v368666. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1459.925667] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8f6190b1-bba2-42df-912f-3b029136c72c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.934541] env[62508]: DEBUG nova.scheduler.client.report [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1459.940491] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Created folder: Instances in parent group-v368666. [ 1459.940741] env[62508]: DEBUG oslo.service.loopingcall [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1459.941188] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1459.943497] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82de7b3d-d0d8-404f-be2f-321b9ecf85a6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.963974] env[62508]: DEBUG nova.scheduler.client.report [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1459.964234] env[62508]: DEBUG nova.compute.provider_tree [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1459.976013] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1459.976013] env[62508]: value = "task-1775765" [ 1459.976013] env[62508]: _type = "Task" [ 1459.976013] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.981779] env[62508]: DEBUG nova.scheduler.client.report [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1459.991885] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775765, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.013132] env[62508]: DEBUG nova.scheduler.client.report [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1460.063247] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524f2e52-8f14-e8de-5537-a7e748c4e31d, 'name': SearchDatastore_Task, 'duration_secs': 0.014671} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.066356] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1460.066602] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6/e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1460.067187] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-85b5a5a0-a29f-4a1b-89e0-dedd9a51d11c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.070124] env[62508]: INFO nova.compute.manager [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Unrescuing [ 1460.070402] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "refresh_cache-7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1460.070572] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquired lock "refresh_cache-7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.070755] env[62508]: DEBUG nova.network.neutron [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 
tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1460.079485] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1460.079485] env[62508]: value = "task-1775766" [ 1460.079485] env[62508]: _type = "Task" [ 1460.079485] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.096581] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775766, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.116810] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1460.117321] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-863682cd-4393-4557-abd2-e506a7f6cd67 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.125998] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1460.125998] env[62508]: value = "task-1775767" [ 1460.125998] env[62508]: _type = "Task" [ 1460.125998] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.139291] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775767, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.243754] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1460.370454] env[62508]: DEBUG nova.compute.manager [req-f3af9c4b-b254-4cb3-8ec7-376baa80730a req-0d297f0c-5fdf-4292-8fd5-67fdc474c500 service nova] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Received event network-vif-deleted-4efa45b1-9d69-4e50-980d-b8a62b229d03 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1460.377304] env[62508]: INFO nova.compute.manager [req-f3af9c4b-b254-4cb3-8ec7-376baa80730a req-0d297f0c-5fdf-4292-8fd5-67fdc474c500 service nova] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Neutron deleted interface 4efa45b1-9d69-4e50-980d-b8a62b229d03; detaching it from the instance and deleting it from the info cache [ 1460.377304] env[62508]: DEBUG nova.network.neutron [req-f3af9c4b-b254-4cb3-8ec7-376baa80730a req-0d297f0c-5fdf-4292-8fd5-67fdc474c500 service nova] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1460.473721] env[62508]: DEBUG nova.network.neutron [-] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1460.490027] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775765, 'name': CreateVM_Task, 'duration_secs': 0.50119} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.494590] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1460.498067] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1460.498067] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.498067] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1460.498618] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81432d82-c4f4-4601-808b-19b1028bdf6e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.506513] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1460.506513] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523ace6f-47d8-ec1e-6a59-d7d5b1cc5ac1" [ 1460.506513] env[62508]: _type = "Task" [ 1460.506513] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.516421] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c76c00e-1c54-4b66-af88-2577ce86320b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.525256] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523ace6f-47d8-ec1e-6a59-d7d5b1cc5ac1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.529303] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-173bbf91-a60c-4385-99ee-3cd9f72d43ae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.572165] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d1abb0-c581-4fb5-a6ff-1e2f8136995c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.591697] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac3f743-9663-4485-b67f-ae64a5adccf4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.613260] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775766, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.613897] env[62508]: DEBUG nova.compute.provider_tree [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1460.652786] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775767, 'name': PowerOffVM_Task, 'duration_secs': 0.33248} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.653122] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1460.654045] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95562dd6-3e06-4fc8-bfb0-5d9af86256db {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.678313] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f591f010-fbba-4aed-8545-357e723ede7c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.724201] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1460.724535] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1143c24f-162e-4f4f-9b8d-23622615eb3b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.731917] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1460.731917] env[62508]: value = "task-1775768" [ 1460.731917] env[62508]: _type = "Task" [ 1460.731917] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.747041] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] VM already powered off {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1460.747425] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1460.747590] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1460.747739] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.747911] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1460.748206] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fbf96e79-9363-466e-b141-a92131579fd0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.762657] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1460.762898] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1460.763765] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a0a0ae5-f681-410e-945e-57c4965da9cc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.771297] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1460.771297] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5255d80b-1f19-c8d8-05e8-dea1ca53b1e3" [ 1460.771297] env[62508]: _type = "Task" [ 1460.771297] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.779235] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5255d80b-1f19-c8d8-05e8-dea1ca53b1e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.782714] env[62508]: DEBUG nova.network.neutron [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Updated VIF entry in instance network info cache for port cafd5648-99e8-4c28-92bb-439b1d656b15. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1460.783075] env[62508]: DEBUG nova.network.neutron [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Updating instance_info_cache with network_info: [{"id": "cafd5648-99e8-4c28-92bb-439b1d656b15", "address": "fa:16:3e:79:5e:f6", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcafd5648-99", "ovs_interfaceid": "cafd5648-99e8-4c28-92bb-439b1d656b15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1460.880506] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1671a64f-75e3-449d-b3c3-4459cf288589 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.890403] env[62508]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0294705b-4e0f-455e-bfcc-6ce7f147ef5a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.927143] env[62508]: DEBUG nova.compute.manager [req-f3af9c4b-b254-4cb3-8ec7-376baa80730a req-0d297f0c-5fdf-4292-8fd5-67fdc474c500 service nova] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Detach interface failed, port_id=4efa45b1-9d69-4e50-980d-b8a62b229d03, reason: Instance db74146d-abc3-4d48-be1b-6ad471794dbf could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1460.953049] env[62508]: DEBUG nova.network.neutron [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Successfully updated port: 268ea73b-a264-453c-969d-59f58dd50192 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1460.976885] env[62508]: INFO nova.compute.manager [-] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Took 1.39 seconds to deallocate network for instance. [ 1461.006839] env[62508]: DEBUG nova.network.neutron [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Updating instance_info_cache with network_info: [{"id": "985905ec-2a79-4b7a-b4ad-d3bf00a42f43", "address": "fa:16:3e:fa:b1:10", "network": {"id": "1469693f-972e-4bc4-8302-f159fa7e79b8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1766475744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27e6f55b56be40d2a619f0119aefb2ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap985905ec-2a", "ovs_interfaceid": "985905ec-2a79-4b7a-b4ad-d3bf00a42f43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.020169] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523ace6f-47d8-ec1e-6a59-d7d5b1cc5ac1, 'name': SearchDatastore_Task, 'duration_secs': 0.060534} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.021200] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.021200] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1461.021200] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1461.099598] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775766, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.639593} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.099943] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6/e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1461.100187] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1461.100518] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f7a72180-9eb7-4f78-b6da-3e0cf518165e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.109863] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1461.109863] env[62508]: value = "task-1775769" [ 1461.109863] env[62508]: _type = "Task" [ 1461.109863] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.123938] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775769, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.157295] env[62508]: DEBUG nova.scheduler.client.report [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 70 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1461.157611] env[62508]: DEBUG nova.compute.provider_tree [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 70 to 71 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1461.157926] env[62508]: DEBUG nova.compute.provider_tree [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1461.287297] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5255d80b-1f19-c8d8-05e8-dea1ca53b1e3, 'name': SearchDatastore_Task, 'duration_secs': 0.019187} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.288483] env[62508]: DEBUG oslo_concurrency.lockutils [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] Releasing lock "refresh_cache-e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.288742] env[62508]: DEBUG nova.compute.manager [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Received event network-vif-plugged-e9750a97-050e-4f74-b663-2e63804efb6f {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1461.288964] env[62508]: DEBUG oslo_concurrency.lockutils [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] Acquiring lock "06baedda-2926-4ec8-a4f6-d62713f48a26-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.289280] env[62508]: DEBUG oslo_concurrency.lockutils [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] Lock "06baedda-2926-4ec8-a4f6-d62713f48a26-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.289490] env[62508]: DEBUG oslo_concurrency.lockutils [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] Lock "06baedda-2926-4ec8-a4f6-d62713f48a26-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.289677] env[62508]: DEBUG nova.compute.manager [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] No waiting events found dispatching network-vif-plugged-e9750a97-050e-4f74-b663-2e63804efb6f {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1461.289876] env[62508]: WARNING nova.compute.manager [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Received unexpected event network-vif-plugged-e9750a97-050e-4f74-b663-2e63804efb6f for instance with vm_state building and task_state spawning. [ 1461.290123] env[62508]: DEBUG nova.compute.manager [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Received event network-changed-e9750a97-050e-4f74-b663-2e63804efb6f {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1461.290350] env[62508]: DEBUG nova.compute.manager [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Refreshing instance network info cache due to event network-changed-e9750a97-050e-4f74-b663-2e63804efb6f. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1461.290584] env[62508]: DEBUG oslo_concurrency.lockutils [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] Acquiring lock "refresh_cache-06baedda-2926-4ec8-a4f6-d62713f48a26" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1461.290770] env[62508]: DEBUG oslo_concurrency.lockutils [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] Acquired lock "refresh_cache-06baedda-2926-4ec8-a4f6-d62713f48a26" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.291012] env[62508]: DEBUG nova.network.neutron [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Refreshing network info cache for port e9750a97-050e-4f74-b663-2e63804efb6f {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1461.292365] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8b2b6b2-c448-4e02-a2a5-9f33f3ae98a7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.300586] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1461.300586] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5216d931-e1c5-bff6-5dee-c6a1a1efb62d" [ 1461.300586] env[62508]: _type = "Task" [ 1461.300586] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.314442] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5216d931-e1c5-bff6-5dee-c6a1a1efb62d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.455541] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquiring lock "refresh_cache-45de6dd5-97f3-4eea-a171-0254a2b37a41" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1461.455702] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquired lock "refresh_cache-45de6dd5-97f3-4eea-a171-0254a2b37a41" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.455861] env[62508]: DEBUG nova.network.neutron [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1461.483587] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.512862] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Releasing lock "refresh_cache-7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.513677] env[62508]: DEBUG nova.objects.instance [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lazy-loading 'flavor' on Instance uuid 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1461.622023] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775769, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107049} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.622023] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1461.622693] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b62282-b411-4d6b-a251-444b958d86bb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.647900] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6/e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1461.648284] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb5f588e-1a47-455e-a47e-4d505c5520b7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.665112] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.979s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.665768] env[62508]: DEBUG nova.compute.manager [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1461.668403] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.578s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.670213] env[62508]: INFO nova.compute.claims [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1461.680643] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1461.680643] env[62508]: value = "task-1775770" [ 1461.680643] env[62508]: _type = "Task" [ 1461.680643] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.689911] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775770, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.743852] env[62508]: DEBUG oslo_concurrency.lockutils [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "38d294a9-2f51-438d-b942-a88e380a981f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.744183] env[62508]: DEBUG oslo_concurrency.lockutils [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "38d294a9-2f51-438d-b942-a88e380a981f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.744441] env[62508]: DEBUG oslo_concurrency.lockutils [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "38d294a9-2f51-438d-b942-a88e380a981f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.744672] env[62508]: DEBUG oslo_concurrency.lockutils [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "38d294a9-2f51-438d-b942-a88e380a981f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.744889] env[62508]: DEBUG oslo_concurrency.lockutils [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "38d294a9-2f51-438d-b942-a88e380a981f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.747611] env[62508]: INFO nova.compute.manager [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Terminating instance [ 1461.749797] env[62508]: DEBUG nova.compute.manager [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1461.750051] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1461.751074] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a244c6-f43f-46c4-82a5-8b98fd557b36 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.760326] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1461.760621] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1eb43112-cb8f-4552-b3f4-57f410c2411c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.769837] env[62508]: DEBUG oslo_vmware.api [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1461.769837] env[62508]: value = "task-1775771" [ 1461.769837] env[62508]: _type = "Task" [ 1461.769837] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.780445] env[62508]: DEBUG oslo_vmware.api [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775771, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.814322] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5216d931-e1c5-bff6-5dee-c6a1a1efb62d, 'name': SearchDatastore_Task, 'duration_secs': 0.017826} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.815075] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.815075] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 63fca45d-5922-4a14-9936-30070c349f8e/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk. 
{{(pid=62508) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1461.815455] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.815455] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1461.815714] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a6d1da7-d9d5-470f-beba-ef6ec074aa7c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.818285] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c59ca43e-f3cb-4250-a4d5-37fd0693512a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.829443] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1461.829443] env[62508]: value = "task-1775772" [ 1461.829443] env[62508]: _type = "Task" [ 1461.829443] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.830868] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1461.831105] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1461.836337] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78a67151-7b13-4877-b546-985a3dbe13b7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.857063] env[62508]: DEBUG oslo_concurrency.lockutils [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "38289797-ecf5-4207-a164-d70228e4411d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.857341] env[62508]: DEBUG oslo_concurrency.lockutils [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "38289797-ecf5-4207-a164-d70228e4411d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.857630] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1461.857630] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c03be5-ef10-478f-d9ad-53a41134ad0b" [ 1461.857630] env[62508]: _type = "Task" [ 1461.857630] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.857866] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775772, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.868618] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c03be5-ef10-478f-d9ad-53a41134ad0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.024598] env[62508]: DEBUG nova.network.neutron [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1462.036024] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77cf519b-1e40-46a8-8d88-2adb30cef746 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.066305] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1462.071856] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-681cf3e8-b98e-492b-851f-233fcd18872e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.083239] env[62508]: DEBUG oslo_vmware.api [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1462.083239] env[62508]: value = "task-1775773" [ 1462.083239] env[62508]: _type = "Task" [ 1462.083239] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.093205] env[62508]: DEBUG nova.compute.manager [req-a4e39fc8-3e3e-4589-9761-603415115adc req-5785118c-1adc-4923-b562-c6888f9d1882 service nova] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Received event network-vif-plugged-268ea73b-a264-453c-969d-59f58dd50192 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1462.093205] env[62508]: DEBUG oslo_concurrency.lockutils [req-a4e39fc8-3e3e-4589-9761-603415115adc req-5785118c-1adc-4923-b562-c6888f9d1882 service nova] Acquiring lock "45de6dd5-97f3-4eea-a171-0254a2b37a41-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.093703] env[62508]: DEBUG oslo_concurrency.lockutils [req-a4e39fc8-3e3e-4589-9761-603415115adc req-5785118c-1adc-4923-b562-c6888f9d1882 service nova] Lock "45de6dd5-97f3-4eea-a171-0254a2b37a41-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.093826] env[62508]: DEBUG oslo_concurrency.lockutils [req-a4e39fc8-3e3e-4589-9761-603415115adc req-5785118c-1adc-4923-b562-c6888f9d1882 service nova] Lock "45de6dd5-97f3-4eea-a171-0254a2b37a41-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1462.094142] env[62508]: DEBUG nova.compute.manager [req-a4e39fc8-3e3e-4589-9761-603415115adc req-5785118c-1adc-4923-b562-c6888f9d1882 service nova] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] No waiting events found dispatching network-vif-plugged-268ea73b-a264-453c-969d-59f58dd50192 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1462.094500] env[62508]: WARNING nova.compute.manager [req-a4e39fc8-3e3e-4589-9761-603415115adc 
req-5785118c-1adc-4923-b562-c6888f9d1882 service nova] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Received unexpected event network-vif-plugged-268ea73b-a264-453c-969d-59f58dd50192 for instance with vm_state building and task_state spawning. [ 1462.094903] env[62508]: DEBUG nova.compute.manager [req-a4e39fc8-3e3e-4589-9761-603415115adc req-5785118c-1adc-4923-b562-c6888f9d1882 service nova] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Received event network-changed-268ea73b-a264-453c-969d-59f58dd50192 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1462.094992] env[62508]: DEBUG nova.compute.manager [req-a4e39fc8-3e3e-4589-9761-603415115adc req-5785118c-1adc-4923-b562-c6888f9d1882 service nova] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Refreshing instance network info cache due to event network-changed-268ea73b-a264-453c-969d-59f58dd50192. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1462.095368] env[62508]: DEBUG oslo_concurrency.lockutils [req-a4e39fc8-3e3e-4589-9761-603415115adc req-5785118c-1adc-4923-b562-c6888f9d1882 service nova] Acquiring lock "refresh_cache-45de6dd5-97f3-4eea-a171-0254a2b37a41" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1462.103712] env[62508]: DEBUG oslo_vmware.api [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775773, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.176384] env[62508]: DEBUG nova.compute.utils [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1462.184673] env[62508]: DEBUG nova.compute.manager [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1462.185020] env[62508]: DEBUG nova.network.neutron [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1462.204474] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775770, 'name': ReconfigVM_Task, 'duration_secs': 0.471128} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.204474] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Reconfigured VM instance instance-0000002b to attach disk [datastore1] e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6/e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1462.204767] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-14f27225-ab3a-4f2e-93c4-350a4c4c126d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.215445] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1462.215445] env[62508]: value = "task-1775774" [ 1462.215445] env[62508]: _type = "Task" [ 1462.215445] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.228101] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775774, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.265038] env[62508]: DEBUG nova.policy [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64a1365de2b7431ebbc9b5066dd0f974', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a34358e29dde46139ee4aa5c8f57d0d0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1462.279446] env[62508]: DEBUG nova.network.neutron [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Updated VIF entry in instance network info cache for port e9750a97-050e-4f74-b663-2e63804efb6f. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1462.280254] env[62508]: DEBUG nova.network.neutron [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Updating instance_info_cache with network_info: [{"id": "e9750a97-050e-4f74-b663-2e63804efb6f", "address": "fa:16:3e:c5:74:ba", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9750a97-05", "ovs_interfaceid": "e9750a97-050e-4f74-b663-2e63804efb6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.292079] env[62508]: DEBUG oslo_vmware.api [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775771, 'name': PowerOffVM_Task, 'duration_secs': 0.244729} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.292538] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1462.292664] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1462.293576] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e51be4f4-80a5-4520-8f0b-5ee6b2aecc62 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.346044] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775772, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.371565] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c03be5-ef10-478f-d9ad-53a41134ad0b, 'name': SearchDatastore_Task, 'duration_secs': 0.035613} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.372644] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a215da87-95ca-4c28-865b-a06d2f692732 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.376303] env[62508]: DEBUG nova.network.neutron [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Updating instance_info_cache with network_info: [{"id": "268ea73b-a264-453c-969d-59f58dd50192", "address": "fa:16:3e:41:9f:6c", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.186", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap268ea73b-a2", "ovs_interfaceid": "268ea73b-a264-453c-969d-59f58dd50192", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.383570] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1462.383570] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d9e49e-52d5-db20-33f7-63bb381e2d48" [ 1462.383570] env[62508]: _type = "Task" [ 1462.383570] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.395144] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d9e49e-52d5-db20-33f7-63bb381e2d48, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.492173] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "f456dd83-6350-46b2-b06c-41dc5c477358" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.492867] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "f456dd83-6350-46b2-b06c-41dc5c477358" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.581049] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1462.581049] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1462.581049] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleting the datastore file [datastore1] 38d294a9-2f51-438d-b942-a88e380a981f {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1462.581049] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8239eb39-2763-4b94-bca4-b72fa816dca1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.596608] env[62508]: DEBUG oslo_vmware.api [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775773, 'name': PowerOffVM_Task, 'duration_secs': 0.354627} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.596985] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1462.603707] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Reconfiguring VM instance instance-00000023 to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1462.604567] env[62508]: DEBUG oslo_vmware.api [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1462.604567] env[62508]: value = "task-1775776" [ 1462.604567] env[62508]: _type = "Task" [ 1462.604567] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.606194] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ec641c7-ad55-49c4-a1e3-c99d95586fff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.654326] env[62508]: DEBUG oslo_vmware.api [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775776, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.658964] env[62508]: DEBUG oslo_vmware.api [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1462.658964] env[62508]: value = "task-1775777" [ 1462.658964] env[62508]: _type = "Task" [ 1462.658964] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.670352] env[62508]: DEBUG oslo_vmware.api [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775777, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.681588] env[62508]: DEBUG nova.compute.manager [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1462.703709] env[62508]: DEBUG nova.network.neutron [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Successfully created port: 7b7bf998-34c3-4a34-9404-ba7189648de8 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1462.727545] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775774, 'name': Rename_Task, 'duration_secs': 0.45528} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.727867] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1462.728135] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e724802-5fc0-4161-9f2e-785c259c7f4b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.739638] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1462.739638] env[62508]: value = "task-1775778" [ 1462.739638] env[62508]: _type = "Task" [ 1462.739638] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.757667] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775778, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.785708] env[62508]: DEBUG oslo_concurrency.lockutils [req-31e576c7-1ff0-4c9d-9d74-da93620a1d48 req-2f680875-4bf5-40bd-8740-4327093e17ab service nova] Releasing lock "refresh_cache-06baedda-2926-4ec8-a4f6-d62713f48a26" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1462.844633] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775772, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.682702} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.845528] env[62508]: INFO nova.virt.vmwareapi.ds_util [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 63fca45d-5922-4a14-9936-30070c349f8e/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk. 
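
The copy-then-wait sequence recorded above (a VirtualDiskManager.CopyVirtualDisk_Task is invoked, wait_for_task polls it at 0% and 51%, and it finally reports "completed successfully") is the standard oslo.vmware session pattern used throughout this log. The following is a minimal illustrative sketch of that pattern only, not code from this deployment; the host, credentials, and datastore paths are placeholders.

# Sketch of the oslo.vmware invoke/wait pattern behind the
# "Waiting for the task" and "_poll_task ... progress is N%" lines above.
# Host, credentials, and vmdk paths below are placeholders.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',
    api_retry_count=10,        # retries for transient API faults
    task_poll_interval=0.5)    # seconds between the progress polls seen above

# Start a server-side copy of a cached image vmdk, mirroring the
# VirtualDiskManager.CopyVirtualDisk_Task invocations in the log.
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task',
    session.vim.service_content.virtualDiskManager,
    sourceName='[datastore1] devstack-image-cache_base/<image>/<image>.vmdk',
    destName='[datastore1] <instance-uuid>/<image>-rescue.vmdk')

# Blocks while polling the task and raises on failure; this is what emits
# the wait_for_task / _poll_task DEBUG lines until "completed successfully".
session.wait_for_task(task)
session.logout()
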
[ 1462.849370] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72fcf081-0150-421f-aa91-a7c4ca56dd24 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.879677] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 63fca45d-5922-4a14-9936-30070c349f8e/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1462.883863] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f708221-4b89-435f-862d-ee9285d2f73a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.897915] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Releasing lock "refresh_cache-45de6dd5-97f3-4eea-a171-0254a2b37a41" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1462.898263] env[62508]: DEBUG nova.compute.manager [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Instance network_info: |[{"id": "268ea73b-a264-453c-969d-59f58dd50192", "address": "fa:16:3e:41:9f:6c", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.186", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap268ea73b-a2", "ovs_interfaceid": "268ea73b-a264-453c-969d-59f58dd50192", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1462.905216] env[62508]: DEBUG oslo_concurrency.lockutils [req-a4e39fc8-3e3e-4589-9761-603415115adc req-5785118c-1adc-4923-b562-c6888f9d1882 service nova] Acquired lock "refresh_cache-45de6dd5-97f3-4eea-a171-0254a2b37a41" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1462.905216] env[62508]: DEBUG nova.network.neutron [req-a4e39fc8-3e3e-4589-9761-603415115adc req-5785118c-1adc-4923-b562-c6888f9d1882 service nova] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Refreshing network info cache for port 268ea73b-a264-453c-969d-59f58dd50192 
{{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1462.905216] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:9f:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '268ea73b-a264-453c-969d-59f58dd50192', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1462.913108] env[62508]: DEBUG oslo.service.loopingcall [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1462.917970] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1462.922247] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6dcae400-2267-498d-a028-023ed736855d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.942494] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1462.942494] env[62508]: value = "task-1775779" [ 1462.942494] env[62508]: _type = "Task" [ 1462.942494] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.952156] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d9e49e-52d5-db20-33f7-63bb381e2d48, 'name': SearchDatastore_Task, 'duration_secs': 0.063275} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.954278] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1462.954639] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 06baedda-2926-4ec8-a4f6-d62713f48a26/06baedda-2926-4ec8-a4f6-d62713f48a26.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1462.954986] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1462.954986] env[62508]: value = "task-1775780" [ 1462.954986] env[62508]: _type = "Task" [ 1462.954986] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.955363] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-380bebac-f0d3-4d69-a39c-c23cbe1c11f6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.969307] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775779, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.977844] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775780, 'name': CreateVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.978215] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1462.978215] env[62508]: value = "task-1775781" [ 1462.978215] env[62508]: _type = "Task" [ 1462.978215] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.991613] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775781, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.123507] env[62508]: DEBUG oslo_vmware.rw_handles [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52679c17-2be1-01fe-32d2-212772188e53/disk-0.vmdk. 
{{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1463.124800] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5431d922-0233-4a02-bdeb-150111146320 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.148481] env[62508]: DEBUG oslo_vmware.api [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775776, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.362354} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.148600] env[62508]: DEBUG oslo_vmware.rw_handles [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52679c17-2be1-01fe-32d2-212772188e53/disk-0.vmdk is in state: ready. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1463.148697] env[62508]: ERROR oslo_vmware.rw_handles [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52679c17-2be1-01fe-32d2-212772188e53/disk-0.vmdk due to incomplete transfer. [ 1463.149058] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1463.149203] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1463.149404] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1463.149605] env[62508]: INFO nova.compute.manager [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Took 1.40 seconds to destroy the instance on the hypervisor. [ 1463.149921] env[62508]: DEBUG oslo.service.loopingcall [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1463.150097] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d77e3146-f76c-4a67-9f3e-f8847a894549 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.152940] env[62508]: DEBUG nova.compute.manager [-] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1463.153845] env[62508]: DEBUG nova.network.neutron [-] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1463.167130] env[62508]: DEBUG oslo_vmware.rw_handles [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52679c17-2be1-01fe-32d2-212772188e53/disk-0.vmdk. {{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1463.167365] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Uploaded image 8584a906-3096-4f94-aa62-d77ab0fc45bb to the Glance image server {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1463.169719] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Destroying the VM {{(pid=62508) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1463.170391] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-fbb9e00c-b875-411b-a93d-13e71473ff93 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.176237] env[62508]: DEBUG oslo_vmware.api [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775777, 'name': ReconfigVM_Task, 'duration_secs': 0.263552} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.177481] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Reconfigured VM instance instance-00000023 to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1463.177682] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1463.178284] env[62508]: DEBUG oslo_vmware.api [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for the task: (returnval){ [ 1463.178284] env[62508]: value = "task-1775782" [ 1463.178284] env[62508]: _type = "Task" [ 1463.178284] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.178284] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-00d39a5c-e1ab-4506-870e-6703544100a3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.197993] env[62508]: DEBUG oslo_vmware.api [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1463.197993] env[62508]: value = "task-1775783" [ 1463.197993] env[62508]: _type = "Task" [ 1463.197993] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.201477] env[62508]: DEBUG oslo_vmware.api [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775782, 'name': Destroy_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.211910] env[62508]: DEBUG oslo_vmware.api [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775783, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.252006] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775778, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.329546] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad572058-50cc-4153-a0ac-b7e16040dc1d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.340598] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8263a72f-00d1-4f53-b239-897cebb9fdb9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.376998] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ff2ab8-5dd2-4306-b242-e123808e38e1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.387205] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba5099d-af04-4baf-bb6c-e1d70bf05c11 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.406376] env[62508]: DEBUG nova.compute.provider_tree [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1463.418222] env[62508]: DEBUG oslo_concurrency.lockutils [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Acquiring lock "7d23d8f0-d7a9-4236-ad28-208e77b72138" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.418430] env[62508]: DEBUG oslo_concurrency.lockutils [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Lock "7d23d8f0-d7a9-4236-ad28-208e77b72138" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.418787] env[62508]: DEBUG oslo_concurrency.lockutils [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Acquiring lock "7d23d8f0-d7a9-4236-ad28-208e77b72138-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.419091] env[62508]: DEBUG oslo_concurrency.lockutils [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Lock 
"7d23d8f0-d7a9-4236-ad28-208e77b72138-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.419396] env[62508]: DEBUG oslo_concurrency.lockutils [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Lock "7d23d8f0-d7a9-4236-ad28-208e77b72138-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.424108] env[62508]: INFO nova.compute.manager [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Terminating instance [ 1463.428021] env[62508]: DEBUG nova.compute.manager [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1463.428021] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1463.428552] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab86d71-66e7-42d4-b275-3058d2566cf8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.442598] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1463.442598] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1cc815b7-6fa7-479d-8229-bfc670c34fab {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.457171] env[62508]: DEBUG oslo_vmware.api [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Waiting for the task: (returnval){ [ 1463.457171] env[62508]: value = "task-1775784" [ 1463.457171] env[62508]: _type = "Task" [ 1463.457171] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.461394] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775779, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.478850] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775780, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.487875] env[62508]: DEBUG oslo_vmware.api [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': task-1775784, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.503186] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775781, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.640482] env[62508]: DEBUG nova.compute.manager [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1463.641856] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6302fb-4b5b-40f7-aae5-b62c9c24834c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.695773] env[62508]: DEBUG oslo_vmware.api [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775782, 'name': Destroy_Task, 'duration_secs': 0.511297} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.697245] env[62508]: DEBUG nova.compute.manager [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1463.699854] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Destroyed the VM [ 1463.700258] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Deleting Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1463.701434] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8ecbb1fa-0624-4bf8-8705-2962784dc75b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.716190] env[62508]: DEBUG oslo_vmware.api [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775783, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.718149] env[62508]: DEBUG oslo_vmware.api [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for the task: (returnval){ [ 1463.718149] env[62508]: value = "task-1775785" [ 1463.718149] env[62508]: _type = "Task" [ 1463.718149] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.735665] env[62508]: DEBUG nova.virt.hardware [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1463.736068] env[62508]: DEBUG nova.virt.hardware [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1463.736242] env[62508]: DEBUG nova.virt.hardware [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
1463.736484] env[62508]: DEBUG nova.virt.hardware [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1463.736484] env[62508]: DEBUG nova.virt.hardware [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1463.736744] env[62508]: DEBUG nova.virt.hardware [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1463.736868] env[62508]: DEBUG nova.virt.hardware [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1463.737053] env[62508]: DEBUG nova.virt.hardware [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1463.737232] env[62508]: DEBUG nova.virt.hardware [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1463.737397] env[62508]: DEBUG nova.virt.hardware [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1463.737580] env[62508]: DEBUG nova.virt.hardware [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1463.738350] env[62508]: DEBUG oslo_vmware.api [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775785, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.739456] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ededec4-e96d-4e49-a602-6a0b232db75b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.758269] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b12c03-e1f7-488f-9624-267629526e04 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.762845] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775778, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.819812] env[62508]: DEBUG nova.network.neutron [req-a4e39fc8-3e3e-4589-9761-603415115adc req-5785118c-1adc-4923-b562-c6888f9d1882 service nova] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Updated VIF entry in instance network info cache for port 268ea73b-a264-453c-969d-59f58dd50192. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1463.820315] env[62508]: DEBUG nova.network.neutron [req-a4e39fc8-3e3e-4589-9761-603415115adc req-5785118c-1adc-4923-b562-c6888f9d1882 service nova] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Updating instance_info_cache with network_info: [{"id": "268ea73b-a264-453c-969d-59f58dd50192", "address": "fa:16:3e:41:9f:6c", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.186", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap268ea73b-a2", "ovs_interfaceid": "268ea73b-a264-453c-969d-59f58dd50192", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1463.928322] env[62508]: ERROR nova.scheduler.client.report [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [req-f5547068-2386-4b66-9f1b-be2ce1770484] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 
5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f5547068-2386-4b66-9f1b-be2ce1770484"}]} [ 1463.950191] env[62508]: DEBUG nova.scheduler.client.report [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1463.959673] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775779, 'name': ReconfigVM_Task, 'duration_secs': 0.690699} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.959975] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 63fca45d-5922-4a14-9936-30070c349f8e/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1463.960865] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebfa2e47-e2a5-4bcd-b438-696f79ef1587 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.970960] env[62508]: DEBUG nova.scheduler.client.report [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1463.971188] env[62508]: DEBUG nova.compute.provider_tree [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1464.003026] env[62508]: DEBUG nova.scheduler.client.report [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 
tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1464.006491] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18693df6-4174-4676-8232-44d0b8e86aee {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.016693] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775780, 'name': CreateVM_Task, 'duration_secs': 0.831642} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.016693] env[62508]: DEBUG oslo_vmware.api [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': task-1775784, 'name': PowerOffVM_Task, 'duration_secs': 0.419025} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.017041] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1464.017320] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1464.017492] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1464.018814] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1464.018969] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1464.019383] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1464.019578] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce95f6fd-6f8d-4774-90b9-235931d0b28a 
{{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.028117] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b117a5c-d5a5-494c-a91e-14c2872c0269 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.028648] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775781, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.773311} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.030686] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 06baedda-2926-4ec8-a4f6-d62713f48a26/06baedda-2926-4ec8-a4f6-d62713f48a26.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1464.030886] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1464.031231] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1464.031231] env[62508]: value = "task-1775786" [ 1464.031231] env[62508]: _type = "Task" [ 1464.031231] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.032700] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-79d2eb3e-0e24-4f8b-abb3-7718ba535e26 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.038685] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1464.038685] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52564bb7-36db-a340-0dbf-782ed10e6609" [ 1464.038685] env[62508]: _type = "Task" [ 1464.038685] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.039648] env[62508]: DEBUG nova.scheduler.client.report [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1464.042316] env[62508]: DEBUG nova.network.neutron [-] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1464.049823] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1464.049823] env[62508]: value = "task-1775788" [ 1464.049823] env[62508]: _type = "Task" [ 1464.049823] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.054515] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775786, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.060516] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52564bb7-36db-a340-0dbf-782ed10e6609, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.068769] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775788, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.126202] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1464.126825] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1464.126825] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Deleting the datastore file [datastore1] 7d23d8f0-d7a9-4236-ad28-208e77b72138 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1464.130477] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f47cc39-9ceb-4a69-a030-6d20297ce85c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.140290] env[62508]: DEBUG oslo_vmware.api [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Waiting for the task: (returnval){ [ 1464.140290] env[62508]: value = "task-1775789" [ 1464.140290] env[62508]: _type = "Task" [ 1464.140290] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.149871] env[62508]: DEBUG nova.compute.manager [req-f0360e4c-ffdd-4f20-8ca6-30e08229aabd req-d757aa04-c0b6-4a1c-a863-1186114b0b7a service nova] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Received event network-vif-deleted-bf83eb47-d009-45ec-9583-6e3d46a6f0f8 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1464.153884] env[62508]: DEBUG oslo_vmware.api [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': task-1775789, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.156204] env[62508]: INFO nova.compute.manager [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] instance snapshotting [ 1464.162108] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6ba7a0-48f8-411f-b616-578da0404a35 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.192889] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2607cbd-f70e-430a-9c1b-1884707c0a33 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.218456] env[62508]: DEBUG oslo_vmware.api [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775783, 'name': PowerOnVM_Task, 'duration_secs': 0.775953} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.218456] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1464.219241] env[62508]: DEBUG nova.compute.manager [None req-a1ac01c1-60eb-4222-ab3f-3b4a79d1fc8e tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1464.219658] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ffb4cc-00e2-44ef-bb8c-916e2c9f6105 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.237885] env[62508]: DEBUG oslo_vmware.api [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775785, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.258021] env[62508]: DEBUG oslo_vmware.api [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775778, 'name': PowerOnVM_Task, 'duration_secs': 1.039186} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.258021] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1464.258227] env[62508]: INFO nova.compute.manager [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Took 10.04 seconds to spawn the instance on the hypervisor. [ 1464.258393] env[62508]: DEBUG nova.compute.manager [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1464.259577] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a53a68-97ec-4d83-868d-276ef7563465 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.323830] env[62508]: DEBUG oslo_concurrency.lockutils [req-a4e39fc8-3e3e-4589-9761-603415115adc req-5785118c-1adc-4923-b562-c6888f9d1882 service nova] Releasing lock "refresh_cache-45de6dd5-97f3-4eea-a171-0254a2b37a41" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1464.505188] env[62508]: DEBUG nova.network.neutron [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Successfully updated port: 7b7bf998-34c3-4a34-9404-ba7189648de8 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1464.548503] env[62508]: INFO nova.compute.manager [-] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Took 1.40 seconds to deallocate network for instance. [ 1464.561581] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775786, 'name': ReconfigVM_Task, 'duration_secs': 0.329411} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.569510] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1464.570409] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52564bb7-36db-a340-0dbf-782ed10e6609, 'name': SearchDatastore_Task, 'duration_secs': 0.02602} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.573942] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-787d9ca2-5d1c-4ced-b31d-32be109b4717 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.576091] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1464.576367] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1464.576796] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1464.576987] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1464.577351] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1464.578788] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89042030-3b61-4236-b029-d46e6a9bd077 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.591087] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775788, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097272} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.594059] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1464.594934] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1464.594934] env[62508]: value = "task-1775790" [ 1464.594934] env[62508]: _type = "Task" [ 1464.594934] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.594934] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1464.595310] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1464.597901] env[62508]: DEBUG nova.compute.manager [req-a2503a47-3fb1-4481-a2ca-86ac2cca5bd0 req-a944cf11-3395-427a-8009-920345f7e043 service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Received event network-vif-plugged-7b7bf998-34c3-4a34-9404-ba7189648de8 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1464.598082] env[62508]: DEBUG oslo_concurrency.lockutils [req-a2503a47-3fb1-4481-a2ca-86ac2cca5bd0 req-a944cf11-3395-427a-8009-920345f7e043 service nova] Acquiring lock "879f1e09-8b21-4f89-bc00-04e3d6710662-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1464.598276] env[62508]: DEBUG oslo_concurrency.lockutils [req-a2503a47-3fb1-4481-a2ca-86ac2cca5bd0 req-a944cf11-3395-427a-8009-920345f7e043 service nova] Lock "879f1e09-8b21-4f89-bc00-04e3d6710662-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1464.598445] env[62508]: DEBUG oslo_concurrency.lockutils [req-a2503a47-3fb1-4481-a2ca-86ac2cca5bd0 req-a944cf11-3395-427a-8009-920345f7e043 service nova] Lock "879f1e09-8b21-4f89-bc00-04e3d6710662-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.598634] env[62508]: DEBUG nova.compute.manager [req-a2503a47-3fb1-4481-a2ca-86ac2cca5bd0 req-a944cf11-3395-427a-8009-920345f7e043 service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] No waiting events found dispatching network-vif-plugged-7b7bf998-34c3-4a34-9404-ba7189648de8 {{(pid=62508) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1464.598828] env[62508]: WARNING nova.compute.manager [req-a2503a47-3fb1-4481-a2ca-86ac2cca5bd0 req-a944cf11-3395-427a-8009-920345f7e043 service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Received unexpected event network-vif-plugged-7b7bf998-34c3-4a34-9404-ba7189648de8 for instance with vm_state building and task_state spawning. [ 1464.602480] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0066d915-684a-4a53-846c-08ab14a7802f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.605924] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7e12c06-d373-4e55-bc70-7adb3af967dc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.628832] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1464.628832] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5219e447-8647-9824-0e60-33971ef82eca" [ 1464.628832] env[62508]: _type = "Task" [ 1464.628832] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.638098] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 06baedda-2926-4ec8-a4f6-d62713f48a26/06baedda-2926-4ec8-a4f6-d62713f48a26.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1464.645765] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a308b5f3-2cdd-4521-b5fb-7bf128c8c089 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.660618] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775790, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.674807] env[62508]: DEBUG oslo_vmware.api [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Task: {'id': task-1775789, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.41288} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.679159] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1464.679370] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1464.679561] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1464.679740] env[62508]: INFO nova.compute.manager [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1464.679987] env[62508]: DEBUG oslo.service.loopingcall [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1464.680226] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5219e447-8647-9824-0e60-33971ef82eca, 'name': SearchDatastore_Task, 'duration_secs': 0.040952} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.680474] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1464.680474] env[62508]: value = "task-1775791" [ 1464.680474] env[62508]: _type = "Task" [ 1464.680474] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.683195] env[62508]: DEBUG nova.compute.manager [-] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1464.683333] env[62508]: DEBUG nova.network.neutron [-] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1464.686896] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b663e0e8-f1ca-4e2f-b3bf-102b3bd233a8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.700847] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775791, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.702081] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f257b58-bda5-4233-8d9e-b23c0d2d8da8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.705104] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1464.705104] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523bc009-410f-3d81-4da1-ceeedf5333fc" [ 1464.705104] env[62508]: _type = "Task" [ 1464.705104] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.706454] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Creating Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1464.706860] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0724113b-53d5-4a83-9636-31583f403510 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.717403] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb2be94-3d77-44ff-a72b-6d8b8b16625b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.722510] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523bc009-410f-3d81-4da1-ceeedf5333fc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.726983] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1464.726983] env[62508]: value = "task-1775792" [ 1464.726983] env[62508]: _type = "Task" [ 1464.726983] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.761264] env[62508]: DEBUG oslo_vmware.api [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775785, 'name': RemoveSnapshot_Task, 'duration_secs': 0.614294} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.765117] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd292bdb-e505-419f-bc19-ae996510c698 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.767769] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Deleted Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1464.768079] env[62508]: INFO nova.compute.manager [None req-d607d91b-84f3-4a65-9e53-692cbfa775f0 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Took 16.64 seconds to snapshot the instance on the hypervisor. [ 1464.777947] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775792, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.788411] env[62508]: INFO nova.compute.manager [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Took 37.82 seconds to build instance. 
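The nova.virt.hardware entries near the start of this section (flavor and image limits/prefs of 0:0:0, "Build topologies for 1 vcpu(s) 1:1:1", one possible topology, sorted result cores=1,sockets=1,threads=1) record Nova enumerating candidate CPU topologies for the m1.nano flavor. The following is a minimal, self-contained sketch of that enumerate-then-sort idea; the function names, the 65536 defaults and the scoring heuristic are illustrative assumptions, not the actual nova.virt.hardware code.

# Illustrative sketch only -- NOT nova.virt.hardware. Enumerates every
# sockets*cores*threads split of the vCPU count within the per-dimension
# limits, then orders the candidates by how well they match a preference.
from itertools import product
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Every (sockets, cores, threads) triple whose product is exactly vcpus
    # and that stays inside the per-dimension limits.
    return [
        VirtCPUTopology(s, c, t)
        for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                               range(1, min(vcpus, max_cores) + 1),
                               range(1, min(vcpus, max_threads) + 1))
        if s * c * t == vcpus
    ]

def sort_by_preference(topologies, preferred=VirtCPUTopology(0, 0, 0)):
    # With a 0:0:0 preference (the case in the log) the order is unchanged;
    # otherwise candidates matching the preferred split sort first.
    def score(topo):
        return sum(getattr(preferred, f) != 0 and getattr(topo, f) != getattr(preferred, f)
                   for f in topo._fields)
    return sorted(topologies, key=score)

if __name__ == "__main__":
    topos = possible_topologies(1)      # m1.nano has a single vCPU
    print(sort_by_preference(topos))    # -> [VirtCPUTopology(sockets=1, cores=1, threads=1)]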
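Most of the oslo_vmware.api entries above follow the same shape: a vCenter task is started, polled, logged as "progress is N%" until it reports success, and then logged with its duration. Below is a minimal sketch of that poll-until-done loop under stated assumptions: FakeTask, its refresh() method and the 0.5 s interval are invented for illustration and are not oslo.vmware's wait_for_task implementation.

# Illustrative sketch only -- NOT oslo.vmware. Polls a task object until it
# reports success, printing progress lines shaped like the ones in this log.
import time

class TaskFailed(Exception):
    pass

class FakeTask:
    """Pretends to be a vCenter task that advances roughly 33% per poll."""
    def __init__(self, task_id, name):
        self.id, self.name, self.progress, self.state = task_id, name, 0, "running"

    def refresh(self):
        self.progress = min(self.progress + 33, 100)
        if self.progress == 100:
            self.state = "success"

def wait_for_task(task, interval=0.5):
    start = time.monotonic()
    while True:
        task.refresh()
        if task.state == "success":
            duration = time.monotonic() - start
            print(f"Task: {{'id': {task.id!r}, 'name': {task.name!r}, "
                  f"'duration_secs': {duration:.6f}}} completed successfully.")
            return task
        if task.state == "error":
            raise TaskFailed(task.id)
        print(f"Task: {{'id': {task.id!r}, 'name': {task.name!r}}} "
              f"progress is {task.progress}%.")
        time.sleep(interval)

if __name__ == "__main__":
    wait_for_task(FakeTask("task-1775790", "PowerOnVM_Task"))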
[ 1464.792437] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f10bf08a-9501-4408-8b62-662d22825560 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.807321] env[62508]: DEBUG nova.compute.provider_tree [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1465.008043] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquiring lock "refresh_cache-879f1e09-8b21-4f89-bc00-04e3d6710662" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1465.008238] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquired lock "refresh_cache-879f1e09-8b21-4f89-bc00-04e3d6710662" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1465.008379] env[62508]: DEBUG nova.network.neutron [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1465.072366] env[62508]: DEBUG oslo_concurrency.lockutils [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1465.128767] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775790, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.196929] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775791, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.217333] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523bc009-410f-3d81-4da1-ceeedf5333fc, 'name': SearchDatastore_Task, 'duration_secs': 0.031153} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.217611] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1465.217871] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 45de6dd5-97f3-4eea-a171-0254a2b37a41/45de6dd5-97f3-4eea-a171-0254a2b37a41.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1465.218160] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d770346a-23c5-4230-952c-ace064af8948 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.227321] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1465.227321] env[62508]: value = "task-1775793" [ 1465.227321] env[62508]: _type = "Task" [ 1465.227321] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.240785] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775793, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.243931] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775792, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.296196] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ebc30c8e-7e32-4209-afb0-ad9f5348a1a5 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.446s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.348298] env[62508]: DEBUG nova.scheduler.client.report [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 73 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1465.348773] env[62508]: DEBUG nova.compute.provider_tree [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 73 to 74 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1465.349110] env[62508]: DEBUG nova.compute.provider_tree [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1465.576619] env[62508]: DEBUG nova.network.neutron [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1465.626146] env[62508]: DEBUG oslo_vmware.api [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775790, 'name': PowerOnVM_Task, 'duration_secs': 0.935905} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.626146] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1465.626146] env[62508]: DEBUG nova.compute.manager [None req-b4b3aa76-9f4b-4897-be97-9b0080bca8f5 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1465.627149] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e092f5c9-04aa-475f-b920-a3d8542b0765 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.699009] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775791, 'name': ReconfigVM_Task, 'duration_secs': 0.803582} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.700135] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 06baedda-2926-4ec8-a4f6-d62713f48a26/06baedda-2926-4ec8-a4f6-d62713f48a26.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1465.701051] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e2c4535a-ea3a-486e-b0cc-f29eae21d275 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.710974] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1465.710974] env[62508]: value = "task-1775794" [ 1465.710974] env[62508]: _type = "Task" [ 1465.710974] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.723102] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775794, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.742596] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775793, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.747066] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775792, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.762241] env[62508]: DEBUG nova.network.neutron [-] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1465.801119] env[62508]: DEBUG nova.compute.manager [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1465.839555] env[62508]: DEBUG nova.network.neutron [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Updating instance_info_cache with network_info: [{"id": "7b7bf998-34c3-4a34-9404-ba7189648de8", "address": "fa:16:3e:00:89:b4", "network": {"id": "97fee1f8-09a7-4a1a-bca0-16b26a3c0207", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2039645983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a34358e29dde46139ee4aa5c8f57d0d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b7bf998-34", "ovs_interfaceid": "7b7bf998-34c3-4a34-9404-ba7189648de8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1465.857755] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.189s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.858029] env[62508]: DEBUG nova.compute.manager [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1465.864437] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.508s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1465.864437] env[62508]: INFO nova.compute.claims [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1466.222025] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775794, 'name': Rename_Task, 'duration_secs': 0.335121} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.222466] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1466.222625] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-523b10ad-afcf-4533-a445-cbed32f6e13d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.237578] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1466.237578] env[62508]: value = "task-1775795" [ 1466.237578] env[62508]: _type = "Task" [ 1466.237578] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.248270] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775792, 'name': CreateSnapshot_Task, 'duration_secs': 1.500266} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.248648] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775793, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.711973} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.249359] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Created Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1466.249700] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 45de6dd5-97f3-4eea-a171-0254a2b37a41/45de6dd5-97f3-4eea-a171-0254a2b37a41.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1466.249957] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1466.250798] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db497dd-5607-4201-8916-669dd516fd50 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.253602] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a859765a-6ab2-4965-a298-b56d65dbdfee {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.258850] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775795, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.269517] env[62508]: INFO nova.compute.manager [-] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Took 1.59 seconds to deallocate network for instance. [ 1466.270031] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1466.270031] env[62508]: value = "task-1775796" [ 1466.270031] env[62508]: _type = "Task" [ 1466.270031] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.284901] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775796, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.332265] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.342718] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Releasing lock "refresh_cache-879f1e09-8b21-4f89-bc00-04e3d6710662" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1466.343093] env[62508]: DEBUG nova.compute.manager [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Instance network_info: |[{"id": "7b7bf998-34c3-4a34-9404-ba7189648de8", "address": "fa:16:3e:00:89:b4", "network": {"id": "97fee1f8-09a7-4a1a-bca0-16b26a3c0207", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2039645983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a34358e29dde46139ee4aa5c8f57d0d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b7bf998-34", "ovs_interfaceid": "7b7bf998-34c3-4a34-9404-ba7189648de8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1466.343546] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:89:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db1f7867-8524-469c-ab47-d2c9e2751d98', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7b7bf998-34c3-4a34-9404-ba7189648de8', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1466.353052] env[62508]: DEBUG oslo.service.loopingcall [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1466.353335] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1466.353779] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-10112c43-7c12-4aa6-8eb7-6e29120d8b95 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.373282] env[62508]: DEBUG nova.compute.utils [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1466.376814] env[62508]: DEBUG nova.compute.manager [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1466.376987] env[62508]: DEBUG nova.network.neutron [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1466.386620] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1466.386620] env[62508]: value = "task-1775797" [ 1466.386620] env[62508]: _type = "Task" [ 1466.386620] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.397695] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775797, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.433173] env[62508]: DEBUG nova.policy [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cb7bd4b3c5d844149357f18dda723562', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b9b1180071bc4cc2a419daac2f51e3f1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1466.709692] env[62508]: DEBUG nova.compute.manager [req-c4b3b8ba-cc3a-469c-838a-cb5a27402d79 req-da42b7f1-fe5d-41d8-8425-e68797c70ca1 service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Received event network-changed-7b7bf998-34c3-4a34-9404-ba7189648de8 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1466.709893] env[62508]: DEBUG nova.compute.manager [req-c4b3b8ba-cc3a-469c-838a-cb5a27402d79 req-da42b7f1-fe5d-41d8-8425-e68797c70ca1 service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Refreshing instance network info cache due to event network-changed-7b7bf998-34c3-4a34-9404-ba7189648de8. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1466.710318] env[62508]: DEBUG oslo_concurrency.lockutils [req-c4b3b8ba-cc3a-469c-838a-cb5a27402d79 req-da42b7f1-fe5d-41d8-8425-e68797c70ca1 service nova] Acquiring lock "refresh_cache-879f1e09-8b21-4f89-bc00-04e3d6710662" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1466.710523] env[62508]: DEBUG oslo_concurrency.lockutils [req-c4b3b8ba-cc3a-469c-838a-cb5a27402d79 req-da42b7f1-fe5d-41d8-8425-e68797c70ca1 service nova] Acquired lock "refresh_cache-879f1e09-8b21-4f89-bc00-04e3d6710662" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1466.710702] env[62508]: DEBUG nova.network.neutron [req-c4b3b8ba-cc3a-469c-838a-cb5a27402d79 req-da42b7f1-fe5d-41d8-8425-e68797c70ca1 service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Refreshing network info cache for port 7b7bf998-34c3-4a34-9404-ba7189648de8 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1466.750452] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775795, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.778869] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Creating linked-clone VM from snapshot {{(pid=62508) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1466.780354] env[62508]: DEBUG oslo_concurrency.lockutils [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.780614] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-339cd458-2014-4612-861e-07dddc933fd6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.793427] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775796, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074946} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.794673] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1466.795060] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1466.795060] env[62508]: value = "task-1775798" [ 1466.795060] env[62508]: _type = "Task" [ 1466.795060] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.795768] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b71e8f4-dd98-4445-9543-59cd5ce2d60b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.812144] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775798, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.835055] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 45de6dd5-97f3-4eea-a171-0254a2b37a41/45de6dd5-97f3-4eea-a171-0254a2b37a41.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1466.835055] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1340a363-45b3-4ac0-bed8-32839db28842 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.859680] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1466.859680] env[62508]: value = "task-1775799" [ 1466.859680] env[62508]: _type = "Task" [ 1466.859680] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.871659] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775799, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.884299] env[62508]: DEBUG nova.compute.manager [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1466.887778] env[62508]: DEBUG nova.network.neutron [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Successfully created port: bd3f776a-1eed-4e8d-b7f3-d958db372a2f {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1466.901639] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775797, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.907700] env[62508]: INFO nova.compute.manager [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Unrescuing [ 1466.908017] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "refresh_cache-63fca45d-5922-4a14-9936-30070c349f8e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1466.908189] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquired lock "refresh_cache-63fca45d-5922-4a14-9936-30070c349f8e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1466.908732] env[62508]: DEBUG nova.network.neutron [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1466.942586] env[62508]: DEBUG oslo_concurrency.lockutils [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Acquiring lock "fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.942869] env[62508]: DEBUG oslo_concurrency.lockutils [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Lock "fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.943117] env[62508]: DEBUG oslo_concurrency.lockutils [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Acquiring lock "fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.943473] env[62508]: DEBUG oslo_concurrency.lockutils [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Lock "fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.943579] env[62508]: DEBUG oslo_concurrency.lockutils [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Lock 
"fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.950997] env[62508]: INFO nova.compute.manager [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Terminating instance [ 1466.952723] env[62508]: DEBUG nova.compute.manager [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1466.952723] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1466.959120] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb715a1-80e0-4896-8822-de7dffc3bbee {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.968035] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1466.968437] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b2df6ec-d3dc-48be-9622-66ebc6d2b0f2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.980962] env[62508]: DEBUG oslo_vmware.api [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for the task: (returnval){ [ 1466.980962] env[62508]: value = "task-1775800" [ 1466.980962] env[62508]: _type = "Task" [ 1466.980962] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.993353] env[62508]: DEBUG oslo_vmware.api [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775800, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.256247] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775795, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.314278] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775798, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.374094] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775799, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.410677] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775797, 'name': CreateVM_Task, 'duration_secs': 0.963224} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.410774] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1467.414694] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1467.414864] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1467.415202] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1467.419233] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f43a9af-bb90-41a9-89f2-930a40058930 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.426947] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1467.426947] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52cbe2ab-b63d-b7fd-a6bf-2ed2cd4edf25" [ 1467.426947] env[62508]: _type = "Task" [ 1467.426947] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.440529] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52cbe2ab-b63d-b7fd-a6bf-2ed2cd4edf25, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.497361] env[62508]: DEBUG oslo_vmware.api [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775800, 'name': PowerOffVM_Task, 'duration_secs': 0.263405} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.497361] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1467.497361] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1467.497744] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fb4b11ad-1a91-4811-97b7-5d27f2cdac2d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.525300] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ab37e6-1f0e-433e-aaf8-40d65e414433 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.537453] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed7d5ad-ff8f-4109-b6df-6accad5ae793 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.577649] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22be398e-1d3c-45e1-ae58-354ba5566a2d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.581802] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1467.582024] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1467.582219] env[62508]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Deleting the datastore file [datastore1] fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1467.582993] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a67d9950-d383-4b76-bc59-e1be8ec7c594 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.588426] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82248c4-4cae-4ee6-8b5d-20af4430be5c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.595112] env[62508]: DEBUG oslo_vmware.api [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for the task: (returnval){ [ 1467.595112] env[62508]: value = "task-1775802" [ 1467.595112] env[62508]: _type = "Task" [ 1467.595112] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.609556] env[62508]: DEBUG nova.compute.provider_tree [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1467.614183] env[62508]: DEBUG nova.network.neutron [req-c4b3b8ba-cc3a-469c-838a-cb5a27402d79 req-da42b7f1-fe5d-41d8-8425-e68797c70ca1 service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Updated VIF entry in instance network info cache for port 7b7bf998-34c3-4a34-9404-ba7189648de8. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1467.614183] env[62508]: DEBUG nova.network.neutron [req-c4b3b8ba-cc3a-469c-838a-cb5a27402d79 req-da42b7f1-fe5d-41d8-8425-e68797c70ca1 service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Updating instance_info_cache with network_info: [{"id": "7b7bf998-34c3-4a34-9404-ba7189648de8", "address": "fa:16:3e:00:89:b4", "network": {"id": "97fee1f8-09a7-4a1a-bca0-16b26a3c0207", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2039645983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a34358e29dde46139ee4aa5c8f57d0d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b7bf998-34", "ovs_interfaceid": "7b7bf998-34c3-4a34-9404-ba7189648de8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1467.622181] env[62508]: DEBUG oslo_vmware.api [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775802, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.693379] env[62508]: DEBUG nova.compute.manager [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Stashing vm_state: active {{(pid=62508) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1467.756644] env[62508]: DEBUG oslo_vmware.api [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775795, 'name': PowerOnVM_Task, 'duration_secs': 1.043304} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.756964] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1467.757260] env[62508]: INFO nova.compute.manager [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Took 10.62 seconds to spawn the instance on the hypervisor. 
[ 1467.757387] env[62508]: DEBUG nova.compute.manager [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1467.758278] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7748b93-23e8-4f6a-bb2f-75cda24dc28c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.786071] env[62508]: DEBUG nova.network.neutron [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Updating instance_info_cache with network_info: [{"id": "8fef6288-1754-4516-ae14-0dc489d4bb0f", "address": "fa:16:3e:bd:a3:de", "network": {"id": "60ac9aa7-8f09-4ba8-b8bd-545df0c62d98", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-289553303-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d58abb4cdcb74100b7c81076c7642b6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fef6288-17", "ovs_interfaceid": "8fef6288-1754-4516-ae14-0dc489d4bb0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1467.815483] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775798, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.872220] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775799, 'name': ReconfigVM_Task, 'duration_secs': 0.613923} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.872654] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 45de6dd5-97f3-4eea-a171-0254a2b37a41/45de6dd5-97f3-4eea-a171-0254a2b37a41.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1467.873386] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5231ff51-cb48-4bac-85a4-b73733891534 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.881857] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1467.881857] env[62508]: value = "task-1775803" [ 1467.881857] env[62508]: _type = "Task" [ 1467.881857] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.891614] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775803, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.902499] env[62508]: DEBUG nova.compute.manager [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1467.930024] env[62508]: DEBUG nova.virt.hardware [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1467.930396] env[62508]: DEBUG nova.virt.hardware [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1467.930601] env[62508]: DEBUG nova.virt.hardware [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1467.930835] env[62508]: DEBUG nova.virt.hardware [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1467.931018] env[62508]: DEBUG nova.virt.hardware [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1467.931193] env[62508]: DEBUG nova.virt.hardware [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1467.931676] env[62508]: DEBUG nova.virt.hardware [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1467.931676] env[62508]: DEBUG nova.virt.hardware [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1467.931834] env[62508]: DEBUG 
nova.virt.hardware [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1467.931964] env[62508]: DEBUG nova.virt.hardware [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1467.932172] env[62508]: DEBUG nova.virt.hardware [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1467.933265] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9607db9-b3fa-41e6-9711-a577ec4f2bb0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.949266] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d89e4f-c5e7-489b-9967-64282a1c6f6d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.953739] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52cbe2ab-b63d-b7fd-a6bf-2ed2cd4edf25, 'name': SearchDatastore_Task, 'duration_secs': 0.018047} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.954064] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1467.954297] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1467.954527] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1467.954675] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1467.954848] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1467.955535] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-642ce822-f666-4d7e-9fe6-46cd1a8b6117 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.973013] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1467.973013] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1467.973013] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2915efa-a192-4bfe-9820-7147533fbd01 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.976578] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1467.976578] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523ee881-d2a9-ddfc-c4eb-a2fe7988f36d" [ 1467.976578] env[62508]: _type = "Task" [ 1467.976578] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.987659] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523ee881-d2a9-ddfc-c4eb-a2fe7988f36d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.110570] env[62508]: DEBUG oslo_vmware.api [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Task: {'id': task-1775802, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17501} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.114019] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1468.114019] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1468.114019] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1468.114019] env[62508]: INFO nova.compute.manager [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1468.114019] env[62508]: DEBUG oslo.service.loopingcall [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1468.114019] env[62508]: DEBUG nova.scheduler.client.report [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1468.119144] env[62508]: DEBUG nova.compute.manager [-] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1468.119273] env[62508]: DEBUG nova.network.neutron [-] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1468.121813] env[62508]: DEBUG oslo_concurrency.lockutils [req-c4b3b8ba-cc3a-469c-838a-cb5a27402d79 req-da42b7f1-fe5d-41d8-8425-e68797c70ca1 service nova] Releasing lock "refresh_cache-879f1e09-8b21-4f89-bc00-04e3d6710662" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1468.122136] env[62508]: DEBUG nova.compute.manager [req-c4b3b8ba-cc3a-469c-838a-cb5a27402d79 req-da42b7f1-fe5d-41d8-8425-e68797c70ca1 service nova] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Received event network-vif-deleted-87421d0c-fb71-4543-be75-596ccb1584a9 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1468.222848] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.280542] env[62508]: INFO nova.compute.manager [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Took 34.14 seconds to build instance. 
[ 1468.287352] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Releasing lock "refresh_cache-63fca45d-5922-4a14-9936-30070c349f8e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1468.288343] env[62508]: DEBUG nova.objects.instance [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lazy-loading 'flavor' on Instance uuid 63fca45d-5922-4a14-9936-30070c349f8e {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1468.317608] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775798, 'name': CloneVM_Task} progress is 95%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.393836] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775803, 'name': Rename_Task, 'duration_secs': 0.274773} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.394200] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1468.394468] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-24fcb319-24a7-4551-b540-6199192fd97f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.403509] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1468.403509] env[62508]: value = "task-1775804" [ 1468.403509] env[62508]: _type = "Task" [ 1468.403509] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.418798] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775804, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.490546] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523ee881-d2a9-ddfc-c4eb-a2fe7988f36d, 'name': SearchDatastore_Task, 'duration_secs': 0.021271} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.491498] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-154cadf6-630d-4cba-9058-2eecdf86e113 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.498885] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1468.498885] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5217f721-3725-3afc-2cb9-b67d84e7a444" [ 1468.498885] env[62508]: _type = "Task" [ 1468.498885] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.509062] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5217f721-3725-3afc-2cb9-b67d84e7a444, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.622922] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.761s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.623513] env[62508]: DEBUG nova.compute.manager [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1468.626427] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.026s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.626662] env[62508]: DEBUG nova.objects.instance [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lazy-loading 'resources' on Instance uuid e2d4c71b-1164-4c7d-9ffb-7f5489f92d32 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1468.754285] env[62508]: DEBUG nova.network.neutron [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Successfully updated port: bd3f776a-1eed-4e8d-b7f3-d958db372a2f {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1468.782411] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7be76a10-7f47-4333-a816-cb383003bd6a tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lock "06baedda-2926-4ec8-a4f6-d62713f48a26" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.040s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.795172] env[62508]: DEBUG nova.compute.manager [req-d1e40401-4a00-4534-b197-8a1dfaa5a110 req-2a95325e-dc64-417a-9bc7-4553518b8415 service nova] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Received event network-vif-deleted-5f1e6b41-10f8-488b-93cf-7cb619b5e80d {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1468.795172] env[62508]: INFO nova.compute.manager [req-d1e40401-4a00-4534-b197-8a1dfaa5a110 req-2a95325e-dc64-417a-9bc7-4553518b8415 service nova] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Neutron deleted interface 5f1e6b41-10f8-488b-93cf-7cb619b5e80d; detaching it from the instance and deleting it from the info cache [ 1468.795742] env[62508]: DEBUG nova.network.neutron [req-d1e40401-4a00-4534-b197-8a1dfaa5a110 req-2a95325e-dc64-417a-9bc7-4553518b8415 service nova] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1468.797420] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1487d9a-a7ee-4391-a099-a816e87c0061 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.832628] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1468.833650] env[62508]: DEBUG nova.compute.manager [req-2af28c13-07d9-48e3-9696-398e4589b243 
req-db6d8b45-c1ef-4232-9ad0-864322f289b9 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Received event network-vif-plugged-bd3f776a-1eed-4e8d-b7f3-d958db372a2f {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1468.834899] env[62508]: DEBUG oslo_concurrency.lockutils [req-2af28c13-07d9-48e3-9696-398e4589b243 req-db6d8b45-c1ef-4232-9ad0-864322f289b9 service nova] Acquiring lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.834899] env[62508]: DEBUG oslo_concurrency.lockutils [req-2af28c13-07d9-48e3-9696-398e4589b243 req-db6d8b45-c1ef-4232-9ad0-864322f289b9 service nova] Lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.834899] env[62508]: DEBUG oslo_concurrency.lockutils [req-2af28c13-07d9-48e3-9696-398e4589b243 req-db6d8b45-c1ef-4232-9ad0-864322f289b9 service nova] Lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.834899] env[62508]: DEBUG nova.compute.manager [req-2af28c13-07d9-48e3-9696-398e4589b243 req-db6d8b45-c1ef-4232-9ad0-864322f289b9 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] No waiting events found dispatching network-vif-plugged-bd3f776a-1eed-4e8d-b7f3-d958db372a2f {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1468.834899] env[62508]: WARNING nova.compute.manager [req-2af28c13-07d9-48e3-9696-398e4589b243 req-db6d8b45-c1ef-4232-9ad0-864322f289b9 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Received unexpected event network-vif-plugged-bd3f776a-1eed-4e8d-b7f3-d958db372a2f for instance with vm_state building and task_state spawning. [ 1468.838014] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d380f67-adab-408b-a284-13040c639f9e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.846929] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775798, 'name': CloneVM_Task, 'duration_secs': 1.741696} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.849405] env[62508]: INFO nova.virt.vmwareapi.vmops [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Created linked-clone VM from snapshot [ 1468.849405] env[62508]: DEBUG oslo_vmware.api [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1468.849405] env[62508]: value = "task-1775805" [ 1468.849405] env[62508]: _type = "Task" [ 1468.849405] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.850045] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-855a42af-6b17-4f60-a3d9-c0c989639b1b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.863529] env[62508]: DEBUG oslo_vmware.api [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775805, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.867200] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Uploading image 565b7594-dc52-4296-bae4-f407a85d24ef {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1468.879035] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Destroying the VM {{(pid=62508) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1468.879369] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7e731360-931d-4a40-b267-5782a0c63f38 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.890025] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1468.890025] env[62508]: value = "task-1775806" [ 1468.890025] env[62508]: _type = "Task" [ 1468.890025] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.899541] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775806, 'name': Destroy_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.914038] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775804, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.010644] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5217f721-3725-3afc-2cb9-b67d84e7a444, 'name': SearchDatastore_Task, 'duration_secs': 0.031652} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.010948] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1469.011235] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 879f1e09-8b21-4f89-bc00-04e3d6710662/879f1e09-8b21-4f89-bc00-04e3d6710662.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1469.011530] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb7ef36f-1010-485f-8d6d-f911e244c6ca {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.020132] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1469.020132] env[62508]: value = "task-1775807" [ 1469.020132] env[62508]: _type = "Task" [ 1469.020132] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.033587] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775807, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.133440] env[62508]: DEBUG nova.compute.utils [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1469.135219] env[62508]: DEBUG nova.compute.manager [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1469.135439] env[62508]: DEBUG nova.network.neutron [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1469.185791] env[62508]: DEBUG nova.network.neutron [-] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1469.207074] env[62508]: DEBUG nova.policy [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df8b8a7ad1fe40adb3ff61fe8a3cfb3b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ae1e8f147289480aa4ecab1500a0e3cf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1469.262238] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1469.265188] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquired lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.265542] env[62508]: DEBUG nova.network.neutron [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1469.285167] env[62508]: DEBUG nova.compute.manager [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1469.302312] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a89f6f35-ff34-4de4-a586-82cf6dbc10c8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.315140] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fbb2e6d-4b3c-48c1-8ff6-1e79007ccdb9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.364092] env[62508]: DEBUG nova.compute.manager [req-d1e40401-4a00-4534-b197-8a1dfaa5a110 req-2a95325e-dc64-417a-9bc7-4553518b8415 service nova] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Detach interface failed, port_id=5f1e6b41-10f8-488b-93cf-7cb619b5e80d, reason: Instance fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1469.376714] env[62508]: DEBUG oslo_vmware.api [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775805, 'name': PowerOffVM_Task, 'duration_secs': 0.300269} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.376714] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1469.382404] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Reconfiguring VM instance instance-00000029 to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1469.386919] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1ee9b3b-8f41-4b38-b087-2367f0d31994 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.413902] env[62508]: DEBUG oslo_vmware.api [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1469.413902] env[62508]: value = "task-1775808" [ 1469.413902] env[62508]: _type = "Task" [ 1469.413902] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.431044] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775806, 'name': Destroy_Task} progress is 33%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.431417] env[62508]: DEBUG oslo_vmware.api [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775804, 'name': PowerOnVM_Task, 'duration_secs': 0.761793} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.435475] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1469.435625] env[62508]: INFO nova.compute.manager [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Took 9.71 seconds to spawn the instance on the hypervisor. [ 1469.435791] env[62508]: DEBUG nova.compute.manager [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1469.436124] env[62508]: DEBUG oslo_vmware.api [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775808, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.436943] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-189cd40a-bb49-4050-baf3-3a01cf00b635 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.493197] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.493653] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.537424] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775807, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.602193] env[62508]: DEBUG nova.network.neutron [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Successfully created port: f89bf0c0-faa6-4104-ad18-7048847b0aa0 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1469.638876] env[62508]: DEBUG nova.compute.manager [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1469.688498] env[62508]: INFO nova.compute.manager [-] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Took 1.57 seconds to deallocate network for instance. [ 1469.723519] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8ea207-aa46-4c51-a708-a79669a44033 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.732277] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a5a786-7abc-49a9-978e-f798af1b3136 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.768569] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a5d666a-43ce-4750-974c-29c6507d0cc8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.779052] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1972e988-5897-43c1-8e4d-c05f978fdc0e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.800530] env[62508]: DEBUG nova.compute.provider_tree [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1469.807562] env[62508]: DEBUG nova.network.neutron [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1469.816702] env[62508]: DEBUG oslo_concurrency.lockutils [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.909461] env[62508]: DEBUG nova.network.neutron [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Successfully created port: ed7e3fd8-f819-44c2-8370-860c27d95f6b {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1469.915068] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775806, 'name': Destroy_Task, 'duration_secs': 0.739666} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.918401] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Destroyed the VM [ 1469.918707] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Deleting Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1469.918975] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ef004519-8ddb-46ea-905e-f186593b141f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.927315] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1469.927315] env[62508]: value = "task-1775809" [ 1469.927315] env[62508]: _type = "Task" [ 1469.927315] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.931026] env[62508]: DEBUG oslo_vmware.api [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775808, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.945757] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775809, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.963115] env[62508]: INFO nova.compute.manager [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Took 33.68 seconds to build instance. [ 1470.011257] env[62508]: DEBUG nova.network.neutron [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Updating instance_info_cache with network_info: [{"id": "bd3f776a-1eed-4e8d-b7f3-d958db372a2f", "address": "fa:16:3e:c0:6a:f0", "network": {"id": "c8fc62e6-749b-4f96-8d05-8664390ef76f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1342046586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9b1180071bc4cc2a419daac2f51e3f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d829efb7-e98e-4b67-bd03-b0888287dbfd", "external-id": "nsx-vlan-transportzone-128", "segmentation_id": 128, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd3f776a-1e", "ovs_interfaceid": "bd3f776a-1eed-4e8d-b7f3-d958db372a2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.036720] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775807, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.582469} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.037132] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 879f1e09-8b21-4f89-bc00-04e3d6710662/879f1e09-8b21-4f89-bc00-04e3d6710662.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1470.037313] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1470.037499] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f572c65-1a5d-4bef-b1ba-4a066edb2e8e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.045815] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1470.045815] env[62508]: value = "task-1775810" [ 1470.045815] env[62508]: _type = "Task" [ 1470.045815] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.060973] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775810, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.198627] env[62508]: DEBUG oslo_concurrency.lockutils [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.303316] env[62508]: DEBUG nova.scheduler.client.report [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1470.425123] env[62508]: DEBUG oslo_vmware.api [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775808, 'name': ReconfigVM_Task, 'duration_secs': 0.626028} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.425358] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Reconfigured VM instance instance-00000029 to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1470.425580] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1470.425853] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d7d0db6-d447-49f7-816b-89b143f60207 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.435405] env[62508]: DEBUG oslo_vmware.api [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1470.435405] env[62508]: value = "task-1775811" [ 1470.435405] env[62508]: _type = "Task" [ 1470.435405] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.443772] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775809, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.446747] env[62508]: DEBUG oslo_vmware.api [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775811, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.464965] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f802ff1-c8fe-4b22-ba8e-a3dd21b9b49c tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lock "45de6dd5-97f3-4eea-a171-0254a2b37a41" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.436s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.518175] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Releasing lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.518575] env[62508]: DEBUG nova.compute.manager [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Instance network_info: |[{"id": "bd3f776a-1eed-4e8d-b7f3-d958db372a2f", "address": "fa:16:3e:c0:6a:f0", "network": {"id": "c8fc62e6-749b-4f96-8d05-8664390ef76f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1342046586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9b1180071bc4cc2a419daac2f51e3f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d829efb7-e98e-4b67-bd03-b0888287dbfd", "external-id": "nsx-vlan-transportzone-128", "segmentation_id": 128, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd3f776a-1e", "ovs_interfaceid": "bd3f776a-1eed-4e8d-b7f3-d958db372a2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1470.518976] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:6a:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd829efb7-e98e-4b67-bd03-b0888287dbfd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd3f776a-1eed-4e8d-b7f3-d958db372a2f', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1470.527079] env[62508]: 
DEBUG nova.virt.vmwareapi.vm_util [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Creating folder: Project (b9b1180071bc4cc2a419daac2f51e3f1). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1470.527613] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e600d6e9-79a9-492f-aa43-f631390057b1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.540671] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Created folder: Project (b9b1180071bc4cc2a419daac2f51e3f1) in parent group-v368536. [ 1470.540943] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Creating folder: Instances. Parent ref: group-v368673. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1470.541222] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87dfcc5d-f8b5-4475-8399-40c47e9938b9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.551748] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Created folder: Instances in parent group-v368673. [ 1470.551998] env[62508]: DEBUG oslo.service.loopingcall [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1470.552653] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1470.552880] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-56e2ab70-2180-463c-aa5a-c212bb7be2e0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.571311] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775810, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097942} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.571999] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1470.572867] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43c4960-9fc3-4d5a-a5a6-691ef97efcd0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.578642] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1470.578642] env[62508]: value = "task-1775814" [ 1470.578642] env[62508]: _type = "Task" [ 1470.578642] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.601872] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] 879f1e09-8b21-4f89-bc00-04e3d6710662/879f1e09-8b21-4f89-bc00-04e3d6710662.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1470.602369] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-072b3515-8d59-489c-9540-6dec48fbc70c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.623602] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775814, 'name': CreateVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.629850] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1470.629850] env[62508]: value = "task-1775815" [ 1470.629850] env[62508]: _type = "Task" [ 1470.629850] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.649515] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775815, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.659126] env[62508]: DEBUG nova.compute.manager [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1470.685014] env[62508]: DEBUG nova.virt.hardware [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1470.685014] env[62508]: DEBUG nova.virt.hardware [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1470.685014] env[62508]: DEBUG nova.virt.hardware [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1470.685014] env[62508]: DEBUG nova.virt.hardware [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1470.685014] env[62508]: DEBUG nova.virt.hardware [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1470.685333] env[62508]: DEBUG nova.virt.hardware [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1470.685376] env[62508]: DEBUG nova.virt.hardware [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1470.686065] env[62508]: DEBUG nova.virt.hardware [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1470.686065] env[62508]: DEBUG nova.virt.hardware [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 
tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1470.686065] env[62508]: DEBUG nova.virt.hardware [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1470.686065] env[62508]: DEBUG nova.virt.hardware [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1470.687559] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da50bb7a-c167-43b5-bc6d-e6c606078f1e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.697952] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d06c42-77fc-4c3e-8a2a-d2b52f93e804 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.808962] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.182s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.811516] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.605s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.811767] env[62508]: DEBUG nova.objects.instance [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Lazy-loading 'resources' on Instance uuid 73452964-d690-451d-98c3-fba3c3301c6d {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1470.835799] env[62508]: INFO nova.scheduler.client.report [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Deleted allocations for instance e2d4c71b-1164-4c7d-9ffb-7f5489f92d32 [ 1470.954886] env[62508]: DEBUG oslo_vmware.api [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775811, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.955188] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775809, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.968197] env[62508]: DEBUG nova.compute.manager [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1470.985432] env[62508]: DEBUG nova.compute.manager [req-02295bcd-41d2-4cb8-805f-b8e2938da744 req-a8767ae2-12ed-42aa-a7e4-814afb1e8cd8 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Received event network-changed-bd3f776a-1eed-4e8d-b7f3-d958db372a2f {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1470.985687] env[62508]: DEBUG nova.compute.manager [req-02295bcd-41d2-4cb8-805f-b8e2938da744 req-a8767ae2-12ed-42aa-a7e4-814afb1e8cd8 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Refreshing instance network info cache due to event network-changed-bd3f776a-1eed-4e8d-b7f3-d958db372a2f. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1470.986020] env[62508]: DEBUG oslo_concurrency.lockutils [req-02295bcd-41d2-4cb8-805f-b8e2938da744 req-a8767ae2-12ed-42aa-a7e4-814afb1e8cd8 service nova] Acquiring lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1470.986123] env[62508]: DEBUG oslo_concurrency.lockutils [req-02295bcd-41d2-4cb8-805f-b8e2938da744 req-a8767ae2-12ed-42aa-a7e4-814afb1e8cd8 service nova] Acquired lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.986299] env[62508]: DEBUG nova.network.neutron [req-02295bcd-41d2-4cb8-805f-b8e2938da744 req-a8767ae2-12ed-42aa-a7e4-814afb1e8cd8 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Refreshing network info cache for port bd3f776a-1eed-4e8d-b7f3-d958db372a2f {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1471.089117] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775814, 'name': CreateVM_Task, 'duration_secs': 0.48203} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.089339] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1471.090148] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1471.090364] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1471.090781] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1471.091102] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d98daa7b-9903-4743-b19c-cc8d9857aab7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.096604] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1471.096604] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5275b001-f04c-8965-8343-d88d2c8421c5" [ 1471.096604] env[62508]: _type = "Task" [ 1471.096604] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.105202] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5275b001-f04c-8965-8343-d88d2c8421c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.141619] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775815, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.345345] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c0bd1af-ce89-4bd0-89c2-7ac4c0bd88d9 tempest-ServersAdminNegativeTestJSON-1385882346 tempest-ServersAdminNegativeTestJSON-1385882346-project-member] Lock "e2d4c71b-1164-4c7d-9ffb-7f5489f92d32" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.839s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.454290] env[62508]: DEBUG oslo_vmware.api [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775811, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.460848] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775809, 'name': RemoveSnapshot_Task} progress is 76%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.493851] env[62508]: DEBUG nova.compute.manager [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1471.494805] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1a06e0-fa96-4830-b56e-58e882fa6719 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.501329] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.614159] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5275b001-f04c-8965-8343-d88d2c8421c5, 'name': SearchDatastore_Task, 'duration_secs': 0.021578} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.614466] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1471.614708] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1471.615505] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1471.615505] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1471.615505] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1471.616364] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb9783d7-f371-409a-b5db-3595faa189b7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.630694] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1471.630958] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1471.634392] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-126131b6-e0ab-446e-9293-e3acc0c39ba0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.651714] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775815, 'name': ReconfigVM_Task, 'duration_secs': 0.80953} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.652129] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1471.652129] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e66b2d-be1a-d9e9-40de-e9de5cbbb45e" [ 1471.652129] env[62508]: _type = "Task" [ 1471.652129] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.652424] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Reconfigured VM instance instance-0000002e to attach disk [datastore1] 879f1e09-8b21-4f89-bc00-04e3d6710662/879f1e09-8b21-4f89-bc00-04e3d6710662.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1471.653156] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7bd02001-35f4-452a-aaf7-819ce6f3005f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.671767] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e66b2d-be1a-d9e9-40de-e9de5cbbb45e, 'name': SearchDatastore_Task, 'duration_secs': 0.015842} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.674984] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1471.674984] env[62508]: value = "task-1775816" [ 1471.674984] env[62508]: _type = "Task" [ 1471.674984] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.678195] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59c74624-783c-4171-b12c-6704f5343141 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.695271] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1471.695271] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528316f9-0ce7-728f-b663-cedba33f3140" [ 1471.695271] env[62508]: _type = "Task" [ 1471.695271] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.699837] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775816, 'name': Rename_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.713970] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528316f9-0ce7-728f-b663-cedba33f3140, 'name': SearchDatastore_Task, 'duration_secs': 0.013233} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.714318] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1471.714624] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] aedbd388-3ef7-410f-b0e3-5ea67ad56b65/aedbd388-3ef7-410f-b0e3-5ea67ad56b65.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1471.714966] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-19e519fa-0120-4d2c-8475-62255bc287a6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.727019] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1471.727019] env[62508]: value = "task-1775817" [ 1471.727019] env[62508]: _type = "Task" [ 1471.727019] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.735412] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775817, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.849609] env[62508]: DEBUG nova.network.neutron [req-02295bcd-41d2-4cb8-805f-b8e2938da744 req-a8767ae2-12ed-42aa-a7e4-814afb1e8cd8 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Updated VIF entry in instance network info cache for port bd3f776a-1eed-4e8d-b7f3-d958db372a2f. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1471.849968] env[62508]: DEBUG nova.network.neutron [req-02295bcd-41d2-4cb8-805f-b8e2938da744 req-a8767ae2-12ed-42aa-a7e4-814afb1e8cd8 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Updating instance_info_cache with network_info: [{"id": "bd3f776a-1eed-4e8d-b7f3-d958db372a2f", "address": "fa:16:3e:c0:6a:f0", "network": {"id": "c8fc62e6-749b-4f96-8d05-8664390ef76f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1342046586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9b1180071bc4cc2a419daac2f51e3f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d829efb7-e98e-4b67-bd03-b0888287dbfd", "external-id": "nsx-vlan-transportzone-128", "segmentation_id": 128, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd3f776a-1e", "ovs_interfaceid": "bd3f776a-1eed-4e8d-b7f3-d958db372a2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1471.876482] env[62508]: DEBUG nova.network.neutron [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Successfully updated port: f89bf0c0-faa6-4104-ad18-7048847b0aa0 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1471.927033] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f503c3-6ab4-493c-b06d-0e250af9e183 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.941210] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00d983b1-3c54-48d7-b71f-7a29e45338d3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.953225] env[62508]: DEBUG oslo_vmware.api [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775809, 
'name': RemoveSnapshot_Task, 'duration_secs': 1.759865} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.957041] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Deleted Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1471.993628] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67587f04-ed3f-4a83-b3c2-73fadd896079 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.996717] env[62508]: DEBUG oslo_vmware.api [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775811, 'name': PowerOnVM_Task, 'duration_secs': 1.299578} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.997773] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1471.998031] env[62508]: DEBUG nova.compute.manager [None req-ea629940-16eb-4581-9f8f-13118b293b0b tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1471.999351] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4686bfa5-a94e-4ab7-880f-511100857125 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.009205] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d653f4-2ccb-4124-a488-0bffb9a2f193 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.017432] env[62508]: INFO nova.compute.manager [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] instance snapshotting [ 1472.021097] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4aad730-1cc8-4376-b4aa-c7f32775e2f1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.056152] env[62508]: DEBUG nova.compute.provider_tree [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1472.058612] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af67f786-348a-44a1-9bfc-c3455ad0ddf4 
{{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.195363] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775816, 'name': Rename_Task, 'duration_secs': 0.2324} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.195657] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1472.195908] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f81902a4-8b3b-490d-a6a8-cd6145fabddc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.205223] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1472.205223] env[62508]: value = "task-1775818" [ 1472.205223] env[62508]: _type = "Task" [ 1472.205223] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.219100] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775818, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.236645] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775817, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.353230] env[62508]: DEBUG oslo_concurrency.lockutils [req-02295bcd-41d2-4cb8-805f-b8e2938da744 req-a8767ae2-12ed-42aa-a7e4-814afb1e8cd8 service nova] Releasing lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1472.462893] env[62508]: WARNING nova.compute.manager [None req-01f15a35-3f79-40ec-97eb-764b6610596a tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Image not found during snapshot: nova.exception.ImageNotFound: Image 565b7594-dc52-4296-bae4-f407a85d24ef could not be found. 
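Two patterns recur throughout the records above: oslo.concurrency lock bookkeeping (how long a caller waited for and then held the "compute_resources" or image-cache locks) and oslo.vmware task polling (repeated "progress is N%" lines until a task reports "completed successfully"). The sketch below is a minimal, stdlib-only illustration of both patterns, assuming nothing beyond what the trace shows; timed_lock, FakeTask and poll_task are hypothetical stand-ins, not the oslo.concurrency or oslo.vmware implementations.

    # Minimal sketch (illustrative only, not the oslo.concurrency / oslo.vmware code)
    # of the lock-timing and task-polling log patterns seen in this trace.
    import logging
    import threading
    import time
    from contextlib import contextmanager

    LOG = logging.getLogger(__name__)
    logging.basicConfig(level=logging.DEBUG, format="%(message)s")

    _locks = {}

    @contextmanager
    def timed_lock(name):
        # Log how long we waited to acquire the named lock and how long we held
        # it, mirroring the "waited N.NNNs" / "held N.NNNs" lines in the trace.
        lock = _locks.setdefault(name, threading.Lock())
        start = time.monotonic()
        with lock:
            LOG.debug('Lock "%s" acquired :: waited %.3fs',
                      name, time.monotonic() - start)
            held_start = time.monotonic()
            try:
                yield
            finally:
                LOG.debug('Lock "%s" released :: held %.3fs',
                          name, time.monotonic() - held_start)

    class FakeTask:
        # Hypothetical task that advances by roughly a third per poll,
        # like the PowerOnVM_Task progress lines above (0% -> 66% -> 100%).
        def __init__(self, name):
            self.name, self.progress = name, 0
        def poll(self):
            self.progress = min(100, self.progress + 34)
            return self.progress

    def poll_task(task, interval=0.5):
        # Poll until the task reaches 100%, logging progress on every iteration.
        while True:
            progress = task.poll()
            LOG.debug("Task: %s progress is %d%%.", task.name, progress)
            if progress >= 100:
                LOG.debug("Task: %s completed successfully.", task.name)
                return
            time.sleep(interval)

    if __name__ == "__main__":
        with timed_lock("compute_resources"):
            poll_task(FakeTask("PowerOnVM_Task"), interval=0.1)

Running the sketch produces the same shape of output as the trace (acquire/release timings wrapped around a polled task), which is why the "waited"/"held" figures in the log are a direct measure of contention on the resource tracker lock.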
[ 1472.562022] env[62508]: DEBUG nova.scheduler.client.report [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1472.576217] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Creating Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1472.576847] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-17aa9447-b3c8-429b-83e4-5c4c27ade77d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.589241] env[62508]: DEBUG oslo_vmware.api [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1472.589241] env[62508]: value = "task-1775819" [ 1472.589241] env[62508]: _type = "Task" [ 1472.589241] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.601899] env[62508]: DEBUG oslo_vmware.api [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775819, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.720932] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775818, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.733775] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775817, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.594895} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.734049] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] aedbd388-3ef7-410f-b0e3-5ea67ad56b65/aedbd388-3ef7-410f-b0e3-5ea67ad56b65.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1472.734271] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1472.734529] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d1984c98-80b0-4b78-a6d1-73c9c5dfdaf7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.742243] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1472.742243] env[62508]: value = "task-1775820" [ 1472.742243] env[62508]: _type = "Task" [ 1472.742243] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.752207] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775820, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.831107] env[62508]: DEBUG oslo_concurrency.lockutils [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "9a3ef326-0fbf-4fd2-bb5e-3009bf661381" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1472.831393] env[62508]: DEBUG oslo_concurrency.lockutils [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "9a3ef326-0fbf-4fd2-bb5e-3009bf661381" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1472.831611] env[62508]: DEBUG oslo_concurrency.lockutils [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "9a3ef326-0fbf-4fd2-bb5e-3009bf661381-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1472.831795] env[62508]: DEBUG oslo_concurrency.lockutils [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "9a3ef326-0fbf-4fd2-bb5e-3009bf661381-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1472.832020] env[62508]: DEBUG oslo_concurrency.lockutils [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "9a3ef326-0fbf-4fd2-bb5e-3009bf661381-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1472.834360] env[62508]: INFO nova.compute.manager [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Terminating instance [ 1472.836204] env[62508]: DEBUG nova.compute.manager [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1472.836394] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1472.837341] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf589a65-249c-4c9e-9a48-437c40b4ae1e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.846284] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1472.846564] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd04a367-ac61-4b45-8f27-b8858ab0eb60 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.854629] env[62508]: DEBUG oslo_vmware.api [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1472.854629] env[62508]: value = "task-1775821" [ 1472.854629] env[62508]: _type = "Task" [ 1472.854629] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.863538] env[62508]: DEBUG oslo_vmware.api [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775821, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.021181] env[62508]: DEBUG nova.compute.manager [req-78d7b061-ab0a-4f49-a91e-258335eb4972 req-e565ea50-edb6-4898-bae5-ac13af720d7f service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Received event network-vif-plugged-f89bf0c0-faa6-4104-ad18-7048847b0aa0 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1473.021181] env[62508]: DEBUG oslo_concurrency.lockutils [req-78d7b061-ab0a-4f49-a91e-258335eb4972 req-e565ea50-edb6-4898-bae5-ac13af720d7f service nova] Acquiring lock "2b166aa9-9381-42c0-a607-7d610f08a4e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.021181] env[62508]: DEBUG oslo_concurrency.lockutils [req-78d7b061-ab0a-4f49-a91e-258335eb4972 req-e565ea50-edb6-4898-bae5-ac13af720d7f service nova] Lock "2b166aa9-9381-42c0-a607-7d610f08a4e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.021181] env[62508]: DEBUG oslo_concurrency.lockutils [req-78d7b061-ab0a-4f49-a91e-258335eb4972 req-e565ea50-edb6-4898-bae5-ac13af720d7f service nova] Lock "2b166aa9-9381-42c0-a607-7d610f08a4e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.021181] env[62508]: DEBUG nova.compute.manager [req-78d7b061-ab0a-4f49-a91e-258335eb4972 req-e565ea50-edb6-4898-bae5-ac13af720d7f service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] No waiting events found dispatching network-vif-plugged-f89bf0c0-faa6-4104-ad18-7048847b0aa0 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1473.022930] env[62508]: WARNING nova.compute.manager [req-78d7b061-ab0a-4f49-a91e-258335eb4972 req-e565ea50-edb6-4898-bae5-ac13af720d7f service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Received unexpected event network-vif-plugged-f89bf0c0-faa6-4104-ad18-7048847b0aa0 for instance with vm_state building and task_state spawning. [ 1473.028992] env[62508]: DEBUG nova.compute.manager [req-78d7b061-ab0a-4f49-a91e-258335eb4972 req-e565ea50-edb6-4898-bae5-ac13af720d7f service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Received event network-changed-f89bf0c0-faa6-4104-ad18-7048847b0aa0 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1473.028992] env[62508]: DEBUG nova.compute.manager [req-78d7b061-ab0a-4f49-a91e-258335eb4972 req-e565ea50-edb6-4898-bae5-ac13af720d7f service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Refreshing instance network info cache due to event network-changed-f89bf0c0-faa6-4104-ad18-7048847b0aa0. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1473.028992] env[62508]: DEBUG oslo_concurrency.lockutils [req-78d7b061-ab0a-4f49-a91e-258335eb4972 req-e565ea50-edb6-4898-bae5-ac13af720d7f service nova] Acquiring lock "refresh_cache-2b166aa9-9381-42c0-a607-7d610f08a4e3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1473.028992] env[62508]: DEBUG oslo_concurrency.lockutils [req-78d7b061-ab0a-4f49-a91e-258335eb4972 req-e565ea50-edb6-4898-bae5-ac13af720d7f service nova] Acquired lock "refresh_cache-2b166aa9-9381-42c0-a607-7d610f08a4e3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1473.028992] env[62508]: DEBUG nova.network.neutron [req-78d7b061-ab0a-4f49-a91e-258335eb4972 req-e565ea50-edb6-4898-bae5-ac13af720d7f service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Refreshing network info cache for port f89bf0c0-faa6-4104-ad18-7048847b0aa0 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1473.064792] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.253s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.068638] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.918s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.072326] env[62508]: INFO nova.compute.claims [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1473.100275] env[62508]: INFO nova.scheduler.client.report [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Deleted allocations for instance 73452964-d690-451d-98c3-fba3c3301c6d [ 1473.107054] env[62508]: DEBUG oslo_vmware.api [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775819, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.163023] env[62508]: DEBUG oslo_concurrency.lockutils [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "63fca45d-5922-4a14-9936-30070c349f8e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.163023] env[62508]: DEBUG oslo_concurrency.lockutils [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lock "63fca45d-5922-4a14-9936-30070c349f8e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.163023] env[62508]: DEBUG oslo_concurrency.lockutils [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "63fca45d-5922-4a14-9936-30070c349f8e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.163023] env[62508]: DEBUG oslo_concurrency.lockutils [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lock "63fca45d-5922-4a14-9936-30070c349f8e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.163023] env[62508]: DEBUG oslo_concurrency.lockutils [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lock "63fca45d-5922-4a14-9936-30070c349f8e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.166844] env[62508]: INFO nova.compute.manager [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Terminating instance [ 1473.174507] env[62508]: DEBUG nova.compute.manager [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1473.174953] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1473.176331] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2c6a71-5d89-4db3-91f3-85a80d24f0ad {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.186472] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1473.186799] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-459d3cb0-0baf-4b27-a33a-96d97354f963 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.197255] env[62508]: DEBUG oslo_vmware.api [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1473.197255] env[62508]: value = "task-1775822" [ 1473.197255] env[62508]: _type = "Task" [ 1473.197255] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.211492] env[62508]: DEBUG oslo_vmware.api [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775822, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.221584] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775818, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.254077] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775820, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.206709} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.254373] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1473.255781] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f64415-8259-43f8-b2bb-6923b6a2d1e9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.282269] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] aedbd388-3ef7-410f-b0e3-5ea67ad56b65/aedbd388-3ef7-410f-b0e3-5ea67ad56b65.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1473.282622] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4d4ab64-58de-453b-ae7d-28992d424f7d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.305871] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1473.305871] env[62508]: value = "task-1775823" [ 1473.305871] env[62508]: _type = "Task" [ 1473.305871] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.317980] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775823, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.366635] env[62508]: DEBUG oslo_vmware.api [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775821, 'name': PowerOffVM_Task, 'duration_secs': 0.334109} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.366986] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1473.367175] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1473.367463] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8aeb1849-8051-44f6-8609-56ef9b7227ff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.478919] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1473.479240] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1473.479485] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Deleting the datastore file [datastore1] 9a3ef326-0fbf-4fd2-bb5e-3009bf661381 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1473.479821] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6be3823a-2e95-4c14-9bba-88a23107e114 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.491102] env[62508]: DEBUG oslo_vmware.api [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1473.491102] env[62508]: value = "task-1775825" [ 1473.491102] env[62508]: _type = "Task" [ 1473.491102] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.500149] env[62508]: DEBUG oslo_vmware.api [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775825, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.601218] env[62508]: DEBUG oslo_vmware.api [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775819, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.620097] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b265cfec-7520-476e-8e40-20ca63c33904 tempest-ServersTestJSON-507876772 tempest-ServersTestJSON-507876772-project-member] Lock "73452964-d690-451d-98c3-fba3c3301c6d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.615s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.620097] env[62508]: DEBUG nova.network.neutron [req-78d7b061-ab0a-4f49-a91e-258335eb4972 req-e565ea50-edb6-4898-bae5-ac13af720d7f service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1473.710706] env[62508]: DEBUG oslo_vmware.api [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775822, 'name': PowerOffVM_Task, 'duration_secs': 0.327187} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.710706] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1473.710706] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1473.710706] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-27b62a47-6035-4e30-888d-97fa4af696b5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.722444] env[62508]: DEBUG oslo_vmware.api [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775818, 'name': PowerOnVM_Task, 'duration_secs': 1.055955} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.722997] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1473.725192] env[62508]: INFO nova.compute.manager [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Took 10.03 seconds to spawn the instance on the hypervisor. [ 1473.725192] env[62508]: DEBUG nova.compute.manager [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1473.725192] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d55331f7-0421-4676-a064-b287cc5cb1d3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.749696] env[62508]: DEBUG nova.network.neutron [req-78d7b061-ab0a-4f49-a91e-258335eb4972 req-e565ea50-edb6-4898-bae5-ac13af720d7f service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1473.820986] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775823, 'name': ReconfigVM_Task, 'duration_secs': 0.306401} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.820986] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Reconfigured VM instance instance-0000002f to attach disk [datastore1] aedbd388-3ef7-410f-b0e3-5ea67ad56b65/aedbd388-3ef7-410f-b0e3-5ea67ad56b65.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1473.820986] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5452f458-8063-4a16-bb85-78f876d09c2f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.826957] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1473.826957] env[62508]: value = "task-1775827" [ 1473.826957] env[62508]: _type = "Task" [ 1473.826957] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.833164] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1473.834228] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1473.834587] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Deleting the datastore file [datastore1] 63fca45d-5922-4a14-9936-30070c349f8e {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1473.835564] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb0a6200-01da-4120-8945-33eaca23934a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.841871] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775827, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.849035] env[62508]: DEBUG oslo_vmware.api [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1473.849035] env[62508]: value = "task-1775828" [ 1473.849035] env[62508]: _type = "Task" [ 1473.849035] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.857339] env[62508]: DEBUG oslo_vmware.api [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775828, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.001762] env[62508]: DEBUG oslo_vmware.api [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775825, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.258974} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.002083] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1474.002310] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1474.002546] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1474.002770] env[62508]: INFO nova.compute.manager [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1474.003069] env[62508]: DEBUG oslo.service.loopingcall [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1474.003299] env[62508]: DEBUG nova.compute.manager [-] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1474.003494] env[62508]: DEBUG nova.network.neutron [-] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1474.100768] env[62508]: DEBUG oslo_vmware.api [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775819, 'name': CreateSnapshot_Task, 'duration_secs': 1.098152} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.100994] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Created Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1474.101775] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a901d2-942c-4d2d-8940-a5176110c4a7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.248052] env[62508]: INFO nova.compute.manager [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Took 36.90 seconds to build instance. [ 1474.254537] env[62508]: DEBUG oslo_concurrency.lockutils [req-78d7b061-ab0a-4f49-a91e-258335eb4972 req-e565ea50-edb6-4898-bae5-ac13af720d7f service nova] Releasing lock "refresh_cache-2b166aa9-9381-42c0-a607-7d610f08a4e3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1474.350362] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775827, 'name': Rename_Task, 'duration_secs': 0.166646} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.354163] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1474.354448] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-121f220e-49eb-4129-9428-cd75ef8e289a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.368236] env[62508]: DEBUG oslo_vmware.api [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775828, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.303566} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.369524] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1474.369709] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1474.369880] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1474.370074] env[62508]: INFO nova.compute.manager [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1474.370309] env[62508]: DEBUG oslo.service.loopingcall [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1474.371205] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1474.371205] env[62508]: value = "task-1775829" [ 1474.371205] env[62508]: _type = "Task" [ 1474.371205] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.371390] env[62508]: DEBUG nova.compute.manager [-] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1474.371390] env[62508]: DEBUG nova.network.neutron [-] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1474.389035] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775829, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.411322] env[62508]: DEBUG nova.network.neutron [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Successfully updated port: ed7e3fd8-f819-44c2-8370-860c27d95f6b {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1474.450292] env[62508]: DEBUG nova.compute.manager [req-b323c74c-8b85-4111-ba7b-5a076279ff10 req-98d5e3f1-b69e-4d03-816e-8595877ffff4 service nova] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Received event network-vif-deleted-b63a3fdb-4813-40cc-8ee8-6478ef3e9640 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1474.450292] env[62508]: INFO nova.compute.manager [req-b323c74c-8b85-4111-ba7b-5a076279ff10 req-98d5e3f1-b69e-4d03-816e-8595877ffff4 service nova] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Neutron deleted interface b63a3fdb-4813-40cc-8ee8-6478ef3e9640; detaching it from the instance and deleting it from the info cache [ 1474.450292] env[62508]: DEBUG nova.network.neutron [req-b323c74c-8b85-4111-ba7b-5a076279ff10 req-98d5e3f1-b69e-4d03-816e-8595877ffff4 service nova] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1474.625864] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Creating linked-clone VM from snapshot {{(pid=62508) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1474.629796] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e1a12c83-d67c-4ec4-a2ab-fe11ad7e32e2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.649456] env[62508]: DEBUG oslo_vmware.api [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1474.649456] env[62508]: value = "task-1775830" [ 1474.649456] env[62508]: _type = "Task" [ 1474.649456] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.652220] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac611aa9-6c56-43a7-aa31-253eaa80a36d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.674468] env[62508]: DEBUG oslo_vmware.api [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775830, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.675022] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ad23a8-8754-40e5-8466-6baadfa05c6a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.715682] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0da429-015d-4536-ba27-7d49b627b6f1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.729022] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2ba236-89bd-4bcc-b4f7-903d9d2b4f15 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.744045] env[62508]: DEBUG nova.compute.provider_tree [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1474.751368] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1fa17ef0-7f3d-481f-8dc2-b1fa88fd5c40 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "879f1e09-8b21-4f89-bc00-04e3d6710662" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.416s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.842598] env[62508]: DEBUG nova.network.neutron [-] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1474.892853] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775829, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.917133] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquiring lock "refresh_cache-2b166aa9-9381-42c0-a607-7d610f08a4e3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1474.917133] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquired lock "refresh_cache-2b166aa9-9381-42c0-a607-7d610f08a4e3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.917133] env[62508]: DEBUG nova.network.neutron [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1474.956211] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc5dceab-4fe1-4fcc-845e-66de075577ad {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.968616] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4160c812-3760-4ded-af7a-a960c2b4a635 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.016528] env[62508]: DEBUG nova.compute.manager [req-b323c74c-8b85-4111-ba7b-5a076279ff10 req-98d5e3f1-b69e-4d03-816e-8595877ffff4 service nova] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Detach interface failed, port_id=b63a3fdb-4813-40cc-8ee8-6478ef3e9640, reason: Instance 9a3ef326-0fbf-4fd2-bb5e-3009bf661381 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1475.163848] env[62508]: DEBUG oslo_vmware.api [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775830, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.247762] env[62508]: DEBUG nova.scheduler.client.report [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1475.254069] env[62508]: DEBUG nova.compute.manager [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1475.274274] env[62508]: DEBUG nova.compute.manager [req-dfca2b22-39d4-48d7-a240-a4f93303f001 req-d11abb51-74a9-4be8-92d6-de88b9b60490 service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Received event network-vif-plugged-ed7e3fd8-f819-44c2-8370-860c27d95f6b {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1475.274274] env[62508]: DEBUG oslo_concurrency.lockutils [req-dfca2b22-39d4-48d7-a240-a4f93303f001 req-d11abb51-74a9-4be8-92d6-de88b9b60490 service nova] Acquiring lock "2b166aa9-9381-42c0-a607-7d610f08a4e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1475.274274] env[62508]: DEBUG oslo_concurrency.lockutils [req-dfca2b22-39d4-48d7-a240-a4f93303f001 req-d11abb51-74a9-4be8-92d6-de88b9b60490 service nova] Lock "2b166aa9-9381-42c0-a607-7d610f08a4e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1475.274274] env[62508]: DEBUG oslo_concurrency.lockutils [req-dfca2b22-39d4-48d7-a240-a4f93303f001 req-d11abb51-74a9-4be8-92d6-de88b9b60490 service nova] Lock "2b166aa9-9381-42c0-a607-7d610f08a4e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.279715] env[62508]: DEBUG nova.compute.manager [req-dfca2b22-39d4-48d7-a240-a4f93303f001 req-d11abb51-74a9-4be8-92d6-de88b9b60490 service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] No waiting events found dispatching network-vif-plugged-ed7e3fd8-f819-44c2-8370-860c27d95f6b {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1475.279715] env[62508]: WARNING nova.compute.manager [req-dfca2b22-39d4-48d7-a240-a4f93303f001 req-d11abb51-74a9-4be8-92d6-de88b9b60490 service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Received unexpected event network-vif-plugged-ed7e3fd8-f819-44c2-8370-860c27d95f6b for instance with vm_state building and task_state spawning. 
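The DeleteDatastoreFile_Task, Rename_Task and PowerOnVM_Task records above all follow the same oslo.vmware pattern: invoke a task-returning vSphere API call, then poll the returned task until it finishes (the "progress is N%" / "completed successfully" lines). A minimal Python sketch of that pattern, assuming a reachable vCenter; the endpoint, credentials, ds_path and datacenter_ref below are hypothetical placeholders, not values taken from this log:

from oslo_vmware import api as vmware_api

# Hypothetical session setup; host, user and password are placeholders.
session = vmware_api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

def delete_datastore_file(session, ds_path, datacenter_ref):
    # FileManager.DeleteDatastoreFile_Task returns a task managed object.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path,
                              datacenter=datacenter_ref)
    # Poll until the task succeeds or raises; this polling is what produces
    # the "Task: {'id': task-NNNNN, ...} completed successfully" entries.
    session.wait_for_task(task)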
[ 1475.279715] env[62508]: DEBUG nova.compute.manager [req-dfca2b22-39d4-48d7-a240-a4f93303f001 req-d11abb51-74a9-4be8-92d6-de88b9b60490 service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Received event network-changed-ed7e3fd8-f819-44c2-8370-860c27d95f6b {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1475.279715] env[62508]: DEBUG nova.compute.manager [req-dfca2b22-39d4-48d7-a240-a4f93303f001 req-d11abb51-74a9-4be8-92d6-de88b9b60490 service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Refreshing instance network info cache due to event network-changed-ed7e3fd8-f819-44c2-8370-860c27d95f6b. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1475.280106] env[62508]: DEBUG oslo_concurrency.lockutils [req-dfca2b22-39d4-48d7-a240-a4f93303f001 req-d11abb51-74a9-4be8-92d6-de88b9b60490 service nova] Acquiring lock "refresh_cache-2b166aa9-9381-42c0-a607-7d610f08a4e3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1475.295173] env[62508]: DEBUG nova.network.neutron [-] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1475.347756] env[62508]: INFO nova.compute.manager [-] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Took 1.34 seconds to deallocate network for instance. [ 1475.391635] env[62508]: DEBUG oslo_vmware.api [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775829, 'name': PowerOnVM_Task, 'duration_secs': 0.556243} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.394526] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1475.394848] env[62508]: INFO nova.compute.manager [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Took 7.49 seconds to spawn the instance on the hypervisor. [ 1475.396128] env[62508]: DEBUG nova.compute.manager [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1475.396379] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a6ae84-1b85-4c45-a93a-fc4e3ab55e4e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.462194] env[62508]: DEBUG nova.network.neutron [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1475.662649] env[62508]: DEBUG oslo_vmware.api [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775830, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.753747] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.685s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.754418] env[62508]: DEBUG nova.compute.manager [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1475.759639] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 26.128s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1475.790695] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1475.798363] env[62508]: INFO nova.compute.manager [-] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Took 1.43 seconds to deallocate network for instance. [ 1475.855443] env[62508]: DEBUG oslo_concurrency.lockutils [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1475.925750] env[62508]: INFO nova.compute.manager [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Took 35.85 seconds to build instance. 
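The "Acquiring lock" / "Acquired lock" / "Releasing lock" and "acquired by ... :: waited N.NNNs" entries seen throughout this run come from oslo.concurrency's two locking idioms: the lockutils.lock() context manager and the lockutils.synchronized() decorator. A minimal sketch, with the lock names copied from the log and placeholder bodies:

from oslo_concurrency import lockutils

# Context-manager form (the lockutils.py:310/313/331 records above).
with lockutils.lock('refresh_cache-2b166aa9-9381-42c0-a607-7d610f08a4e3'):
    pass  # rebuild the instance's network info cache while holding the lock

# Decorator form (the "inner ... lockutils.py:402/407/421" records).
@lockutils.synchronized('compute_resources')
def claim_resources():
    pass  # resource-tracker work serialized on the compute_resources lock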
[ 1475.984566] env[62508]: DEBUG nova.network.neutron [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Updating instance_info_cache with network_info: [{"id": "f89bf0c0-faa6-4104-ad18-7048847b0aa0", "address": "fa:16:3e:53:72:4a", "network": {"id": "9006917b-78bf-463d-8363-6bdaf736b89e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-287042260", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae1e8f147289480aa4ecab1500a0e3cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf89bf0c0-fa", "ovs_interfaceid": "f89bf0c0-faa6-4104-ad18-7048847b0aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ed7e3fd8-f819-44c2-8370-860c27d95f6b", "address": "fa:16:3e:2b:86:68", "network": {"id": "cf10465e-6cd6-49b9-be6d-404c9cedbb35", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-734304522", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.18", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae1e8f147289480aa4ecab1500a0e3cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped7e3fd8-f8", "ovs_interfaceid": "ed7e3fd8-f819-44c2-8370-860c27d95f6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.168201] env[62508]: DEBUG oslo_vmware.api [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775830, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.278533] env[62508]: DEBUG nova.compute.utils [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1476.280540] env[62508]: DEBUG nova.compute.manager [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1476.280684] env[62508]: DEBUG nova.network.neutron [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1476.307923] env[62508]: DEBUG oslo_concurrency.lockutils [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.367865] env[62508]: DEBUG nova.policy [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b50005b538ac45b8903a7429c789d28b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7fbe87622c6445a685055d5cfccceaa1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1476.429123] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7bc3665-3a69-4516-870b-89e22b3e4932 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.896s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.489974] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Releasing lock "refresh_cache-2b166aa9-9381-42c0-a607-7d610f08a4e3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1476.489974] env[62508]: DEBUG nova.compute.manager [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Instance network_info: |[{"id": "f89bf0c0-faa6-4104-ad18-7048847b0aa0", "address": "fa:16:3e:53:72:4a", "network": {"id": "9006917b-78bf-463d-8363-6bdaf736b89e", "bridge": "br-int", "label": 
"tempest-ServersTestMultiNic-287042260", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae1e8f147289480aa4ecab1500a0e3cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf89bf0c0-fa", "ovs_interfaceid": "f89bf0c0-faa6-4104-ad18-7048847b0aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ed7e3fd8-f819-44c2-8370-860c27d95f6b", "address": "fa:16:3e:2b:86:68", "network": {"id": "cf10465e-6cd6-49b9-be6d-404c9cedbb35", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-734304522", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.18", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae1e8f147289480aa4ecab1500a0e3cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped7e3fd8-f8", "ovs_interfaceid": "ed7e3fd8-f819-44c2-8370-860c27d95f6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1476.489974] env[62508]: DEBUG oslo_concurrency.lockutils [req-dfca2b22-39d4-48d7-a240-a4f93303f001 req-d11abb51-74a9-4be8-92d6-de88b9b60490 service nova] Acquired lock "refresh_cache-2b166aa9-9381-42c0-a607-7d610f08a4e3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.489974] env[62508]: DEBUG nova.network.neutron [req-dfca2b22-39d4-48d7-a240-a4f93303f001 req-d11abb51-74a9-4be8-92d6-de88b9b60490 service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Refreshing network info cache for port ed7e3fd8-f819-44c2-8370-860c27d95f6b {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1476.489974] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:72:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f2c424c9-6446-4b2a-af8c-4d9c29117c39', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f89bf0c0-faa6-4104-ad18-7048847b0aa0', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 
'fa:16:3e:2b:86:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e0cfc48-d93b-4477-8082-69a2f7aa7701', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ed7e3fd8-f819-44c2-8370-860c27d95f6b', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1476.508495] env[62508]: DEBUG oslo.service.loopingcall [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1476.514063] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1476.514718] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b8a2b17b-651c-4b15-ae72-aa61fd9f81ca {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.540654] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1476.540654] env[62508]: value = "task-1775831" [ 1476.540654] env[62508]: _type = "Task" [ 1476.540654] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.558022] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775831, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.665610] env[62508]: DEBUG oslo_vmware.api [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775830, 'name': CloneVM_Task, 'duration_secs': 1.864243} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.665927] env[62508]: INFO nova.virt.vmwareapi.vmops [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Created linked-clone VM from snapshot [ 1476.666996] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9fb154-ddc3-4d16-9564-68402dbc40dd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.677316] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Uploading image 0516a45f-0a2d-4d7a-b7b5-26b1d9a35082 {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1476.707990] env[62508]: DEBUG oslo_concurrency.lockutils [None req-660124f6-2a12-4e4c-878b-656ee7480690 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquiring lock "879f1e09-8b21-4f89-bc00-04e3d6710662" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.708416] env[62508]: DEBUG oslo_concurrency.lockutils [None req-660124f6-2a12-4e4c-878b-656ee7480690 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "879f1e09-8b21-4f89-bc00-04e3d6710662" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.711630] env[62508]: INFO nova.compute.manager [None req-660124f6-2a12-4e4c-878b-656ee7480690 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Rebooting instance [ 1476.714676] env[62508]: DEBUG oslo_vmware.rw_handles [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1476.714676] env[62508]: value = "vm-368677" [ 1476.714676] env[62508]: _type = "VirtualMachine" [ 1476.714676] env[62508]: }. 
{{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1476.715406] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-64f6a86b-0158-4417-b3f9-8116d54c4c31 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.720946] env[62508]: DEBUG nova.compute.manager [req-5508916d-fd73-4622-8c9a-0654acaca010 req-16b7dbd0-0301-4d4b-8660-7d4b735d84c3 service nova] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Received event network-vif-deleted-8fef6288-1754-4516-ae14-0dc489d4bb0f {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1476.728856] env[62508]: DEBUG oslo_vmware.rw_handles [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lease: (returnval){ [ 1476.728856] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5278c66e-8525-3f13-b92f-2ba05daac841" [ 1476.728856] env[62508]: _type = "HttpNfcLease" [ 1476.728856] env[62508]: } obtained for exporting VM: (result){ [ 1476.728856] env[62508]: value = "vm-368677" [ 1476.728856] env[62508]: _type = "VirtualMachine" [ 1476.728856] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1476.729227] env[62508]: DEBUG oslo_vmware.api [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the lease: (returnval){ [ 1476.729227] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5278c66e-8525-3f13-b92f-2ba05daac841" [ 1476.729227] env[62508]: _type = "HttpNfcLease" [ 1476.729227] env[62508]: } to be ready. {{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1476.740634] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1476.740634] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5278c66e-8525-3f13-b92f-2ba05daac841" [ 1476.740634] env[62508]: _type = "HttpNfcLease" [ 1476.740634] env[62508]: } is initializing. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1476.784764] env[62508]: INFO nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Updating resource usage from migration 4fdfcd20-c840-423e-a6be-dfa3f61e5d01 [ 1476.789181] env[62508]: DEBUG nova.compute.manager [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1476.814764] env[62508]: WARNING nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 7d23d8f0-d7a9-4236-ad28-208e77b72138 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
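The image upload above starts with a VirtualMachine.ExportVm call that returns an HttpNfcLease; the lease starts out "initializing" and must become ready before the snapshot's disks can be streamed out. A sketch of that handshake, assuming session is an established oslo.vmware VMwareAPISession and vm_ref is the moref of the cloned VM (e.g. vm-368677 above); both variables are assumptions, not values parsed from the log:

# ExportVm hands back an HttpNfcLease covering the VM's disks.
lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
# Block until the lease leaves "initializing" and reports ready;
# a lease error aborts the export.
session.wait_for_lease_ready(lease)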
[ 1476.815049] env[62508]: WARNING nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance b911f25d-711b-411e-bb2d-2e59386ff2ea is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1476.815118] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance de69dbf0-86f1-4b05-a9db-8b9afaabe49c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1476.815180] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance a226327d-11df-45e0-bef8-2337a0317c9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1476.815344] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 868cf942-f348-488d-b00a-af4c8b5efda5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1476.815401] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1476.815506] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 95a289ac-3178-45ea-80d2-905b9af54f3c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1476.815639] env[62508]: WARNING nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1476.815756] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance e478855d-e9c7-4abc-8e22-a4b2eb0c7310 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1476.815870] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1476.815980] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance a10a4217-ae46-4f00-9ba1-cdf74f44ec7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1476.816115] env[62508]: WARNING nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance db74146d-abc3-4d48-be1b-6ad471794dbf is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1476.816239] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 70c8de27-4696-4005-bbec-e7a33e56311b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1476.816367] env[62508]: WARNING nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1476.816484] env[62508]: WARNING nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 38d294a9-2f51-438d-b942-a88e380a981f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1476.816594] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 63fca45d-5922-4a14-9936-30070c349f8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1476.816711] env[62508]: WARNING nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 9a3ef326-0fbf-4fd2-bb5e-3009bf661381 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1476.816837] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 06baedda-2926-4ec8-a4f6-d62713f48a26 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1476.816918] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 45de6dd5-97f3-4eea-a171-0254a2b37a41 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1476.817039] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 879f1e09-8b21-4f89-bc00-04e3d6710662 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1476.817192] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance aedbd388-3ef7-410f-b0e3-5ea67ad56b65 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1476.817308] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 2b166aa9-9381-42c0-a607-7d610f08a4e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1476.817417] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 30e8b6ca-10fd-4e98-815d-1622f162b05c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1476.950318] env[62508]: DEBUG nova.network.neutron [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Successfully created port: f3739879-1b13-47e9-bec2-609738a10c75 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1477.012550] env[62508]: DEBUG nova.network.neutron [req-dfca2b22-39d4-48d7-a240-a4f93303f001 req-d11abb51-74a9-4be8-92d6-de88b9b60490 service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Updated VIF entry in instance network info cache for port ed7e3fd8-f819-44c2-8370-860c27d95f6b. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1477.014427] env[62508]: DEBUG nova.network.neutron [req-dfca2b22-39d4-48d7-a240-a4f93303f001 req-d11abb51-74a9-4be8-92d6-de88b9b60490 service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Updating instance_info_cache with network_info: [{"id": "f89bf0c0-faa6-4104-ad18-7048847b0aa0", "address": "fa:16:3e:53:72:4a", "network": {"id": "9006917b-78bf-463d-8363-6bdaf736b89e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-287042260", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae1e8f147289480aa4ecab1500a0e3cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf89bf0c0-fa", "ovs_interfaceid": "f89bf0c0-faa6-4104-ad18-7048847b0aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ed7e3fd8-f819-44c2-8370-860c27d95f6b", "address": "fa:16:3e:2b:86:68", "network": {"id": "cf10465e-6cd6-49b9-be6d-404c9cedbb35", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-734304522", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.18", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae1e8f147289480aa4ecab1500a0e3cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped7e3fd8-f8", "ovs_interfaceid": "ed7e3fd8-f819-44c2-8370-860c27d95f6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1477.052573] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775831, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.211555] env[62508]: DEBUG oslo_concurrency.lockutils [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "6ae078f6-3b96-4b49-b282-cae74d742c97" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.211925] env[62508]: DEBUG oslo_concurrency.lockutils [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "6ae078f6-3b96-4b49-b282-cae74d742c97" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.239964] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1477.239964] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5278c66e-8525-3f13-b92f-2ba05daac841" [ 1477.239964] env[62508]: _type = "HttpNfcLease" [ 1477.239964] env[62508]: } is ready. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1477.240336] env[62508]: DEBUG oslo_vmware.rw_handles [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1477.240336] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5278c66e-8525-3f13-b92f-2ba05daac841" [ 1477.240336] env[62508]: _type = "HttpNfcLease" [ 1477.240336] env[62508]: }. 
{{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1477.241571] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de44fa9-6ed8-4385-95ef-248b0fef26bf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.245677] env[62508]: DEBUG oslo_concurrency.lockutils [None req-660124f6-2a12-4e4c-878b-656ee7480690 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquiring lock "refresh_cache-879f1e09-8b21-4f89-bc00-04e3d6710662" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.245927] env[62508]: DEBUG oslo_concurrency.lockutils [None req-660124f6-2a12-4e4c-878b-656ee7480690 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquired lock "refresh_cache-879f1e09-8b21-4f89-bc00-04e3d6710662" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.246239] env[62508]: DEBUG nova.network.neutron [None req-660124f6-2a12-4e4c-878b-656ee7480690 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1477.260532] env[62508]: DEBUG oslo_vmware.rw_handles [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c40c21-2bfd-e2a0-c9c2-ba1c3bd33b40/disk-0.vmdk from lease info. {{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1477.262282] env[62508]: DEBUG oslo_vmware.rw_handles [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c40c21-2bfd-e2a0-c9c2-ba1c3bd33b40/disk-0.vmdk for reading. {{(pid=62508) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1477.334620] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance e07ab22e-bd07-4232-abfe-c0617c0b9813 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1477.351146] env[62508]: DEBUG nova.compute.manager [req-2b2750a7-75b7-4b2e-95a1-1472fff855f2 req-8e8ec340-90d6-4cc6-aa78-ec0c0d561a2d service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Received event network-changed-7b7bf998-34c3-4a34-9404-ba7189648de8 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1477.351236] env[62508]: DEBUG nova.compute.manager [req-2b2750a7-75b7-4b2e-95a1-1472fff855f2 req-8e8ec340-90d6-4cc6-aa78-ec0c0d561a2d service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Refreshing instance network info cache due to event network-changed-7b7bf998-34c3-4a34-9404-ba7189648de8. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1477.351453] env[62508]: DEBUG oslo_concurrency.lockutils [req-2b2750a7-75b7-4b2e-95a1-1472fff855f2 req-8e8ec340-90d6-4cc6-aa78-ec0c0d561a2d service nova] Acquiring lock "refresh_cache-879f1e09-8b21-4f89-bc00-04e3d6710662" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.377730] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4590cf34-6d0a-4561-81ed-66aa1843c92a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.517383] env[62508]: DEBUG oslo_concurrency.lockutils [req-dfca2b22-39d4-48d7-a240-a4f93303f001 req-d11abb51-74a9-4be8-92d6-de88b9b60490 service nova] Releasing lock "refresh_cache-2b166aa9-9381-42c0-a607-7d610f08a4e3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1477.557241] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775831, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.716237] env[62508]: DEBUG nova.compute.manager [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1477.839156] env[62508]: DEBUG nova.compute.manager [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1477.843390] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 6afa4e73-64b4-4b10-b598-433f0c22ecb3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1477.875305] env[62508]: DEBUG nova.virt.hardware [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1477.877049] env[62508]: DEBUG nova.virt.hardware [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1477.877411] env[62508]: DEBUG nova.virt.hardware [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1477.877622] env[62508]: DEBUG nova.virt.hardware [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1477.877771] env[62508]: DEBUG nova.virt.hardware [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1477.877991] env[62508]: DEBUG nova.virt.hardware [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1477.878666] env[62508]: DEBUG nova.virt.hardware [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1477.878882] env[62508]: DEBUG nova.virt.hardware [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1477.879122] env[62508]: DEBUG nova.virt.hardware [None 
req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1477.881878] env[62508]: DEBUG nova.virt.hardware [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1477.881878] env[62508]: DEBUG nova.virt.hardware [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1477.882669] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b88e7092-70fa-44bc-bcf5-6553d9632a21 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.896438] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b6a7ba3-fd60-44c4-aa8a-e0326cbcf3a2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.058208] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775831, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.116451] env[62508]: DEBUG nova.network.neutron [None req-660124f6-2a12-4e4c-878b-656ee7480690 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Updating instance_info_cache with network_info: [{"id": "7b7bf998-34c3-4a34-9404-ba7189648de8", "address": "fa:16:3e:00:89:b4", "network": {"id": "97fee1f8-09a7-4a1a-bca0-16b26a3c0207", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2039645983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a34358e29dde46139ee4aa5c8f57d0d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b7bf998-34", "ovs_interfaceid": "7b7bf998-34c3-4a34-9404-ba7189648de8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.247833] env[62508]: DEBUG oslo_concurrency.lockutils [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.348827] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance f307d4d5-e877-4d0a-951c-779c1d2e573b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1478.509233] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Acquiring lock "a0245a18-638d-4c32-bea2-456408b5e001" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.509233] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Lock "a0245a18-638d-4c32-bea2-456408b5e001" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.555868] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775831, 'name': CreateVM_Task, 'duration_secs': 1.794712} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.557698] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1478.557698] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1478.557698] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.558212] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1478.558655] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd0afd84-e44d-46fa-b995-40aa91c0a59e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1478.568029] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for the task: (returnval){ [ 1478.568029] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524a5074-3412-aebd-4bf1-f4c237f6fe04" [ 1478.568029] env[62508]: _type = "Task" [ 1478.568029] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.576483] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524a5074-3412-aebd-4bf1-f4c237f6fe04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.626549] env[62508]: DEBUG oslo_concurrency.lockutils [None req-660124f6-2a12-4e4c-878b-656ee7480690 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Releasing lock "refresh_cache-879f1e09-8b21-4f89-bc00-04e3d6710662" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.627545] env[62508]: DEBUG oslo_concurrency.lockutils [req-2b2750a7-75b7-4b2e-95a1-1472fff855f2 req-8e8ec340-90d6-4cc6-aa78-ec0c0d561a2d service nova] Acquired lock "refresh_cache-879f1e09-8b21-4f89-bc00-04e3d6710662" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.628054] env[62508]: DEBUG nova.network.neutron [req-2b2750a7-75b7-4b2e-95a1-1472fff855f2 req-8e8ec340-90d6-4cc6-aa78-ec0c0d561a2d service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Refreshing network info cache for port 7b7bf998-34c3-4a34-9404-ba7189648de8 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1478.635019] env[62508]: DEBUG nova.compute.manager [None req-660124f6-2a12-4e4c-878b-656ee7480690 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1478.635019] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a61b042-aaaa-4c12-8a60-9ace5ebc10c4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.812022] env[62508]: DEBUG nova.compute.manager [req-1d99cc47-44c3-425d-b881-200b8cba3b01 req-dfd8d2db-b4f7-4021-af9d-d10a86fc46ec service nova] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Received event network-vif-plugged-f3739879-1b13-47e9-bec2-609738a10c75 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1478.812022] env[62508]: DEBUG oslo_concurrency.lockutils [req-1d99cc47-44c3-425d-b881-200b8cba3b01 req-dfd8d2db-b4f7-4021-af9d-d10a86fc46ec service nova] Acquiring lock "30e8b6ca-10fd-4e98-815d-1622f162b05c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.812022] env[62508]: DEBUG oslo_concurrency.lockutils [req-1d99cc47-44c3-425d-b881-200b8cba3b01 req-dfd8d2db-b4f7-4021-af9d-d10a86fc46ec service nova] 
Lock "30e8b6ca-10fd-4e98-815d-1622f162b05c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.812022] env[62508]: DEBUG oslo_concurrency.lockutils [req-1d99cc47-44c3-425d-b881-200b8cba3b01 req-dfd8d2db-b4f7-4021-af9d-d10a86fc46ec service nova] Lock "30e8b6ca-10fd-4e98-815d-1622f162b05c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.812022] env[62508]: DEBUG nova.compute.manager [req-1d99cc47-44c3-425d-b881-200b8cba3b01 req-dfd8d2db-b4f7-4021-af9d-d10a86fc46ec service nova] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] No waiting events found dispatching network-vif-plugged-f3739879-1b13-47e9-bec2-609738a10c75 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1478.812022] env[62508]: WARNING nova.compute.manager [req-1d99cc47-44c3-425d-b881-200b8cba3b01 req-dfd8d2db-b4f7-4021-af9d-d10a86fc46ec service nova] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Received unexpected event network-vif-plugged-f3739879-1b13-47e9-bec2-609738a10c75 for instance with vm_state building and task_state spawning. [ 1478.851433] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance e156aef5-bb56-4c17-9e7e-9419b672c9cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1478.921433] env[62508]: DEBUG nova.network.neutron [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Successfully updated port: f3739879-1b13-47e9-bec2-609738a10c75 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1479.083149] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524a5074-3412-aebd-4bf1-f4c237f6fe04, 'name': SearchDatastore_Task, 'duration_secs': 0.026738} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.083149] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1479.083149] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1479.083149] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.083149] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.083149] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1479.083149] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-659ba575-c515-48b6-8ceb-a3e275ca8a9f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.093853] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1479.094353] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1479.094877] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-674ef5bc-3a24-474b-bfa0-142188d32bbe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.102407] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for the task: (returnval){ [ 1479.102407] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5254ef5f-48f4-47b8-6e26-6947b214181b" [ 1479.102407] env[62508]: _type = "Task" [ 1479.102407] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.112309] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5254ef5f-48f4-47b8-6e26-6947b214181b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.356867] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 38289797-ecf5-4207-a164-d70228e4411d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1479.394170] env[62508]: DEBUG nova.network.neutron [req-2b2750a7-75b7-4b2e-95a1-1472fff855f2 req-8e8ec340-90d6-4cc6-aa78-ec0c0d561a2d service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Updated VIF entry in instance network info cache for port 7b7bf998-34c3-4a34-9404-ba7189648de8. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1479.394170] env[62508]: DEBUG nova.network.neutron [req-2b2750a7-75b7-4b2e-95a1-1472fff855f2 req-8e8ec340-90d6-4cc6-aa78-ec0c0d561a2d service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Updating instance_info_cache with network_info: [{"id": "7b7bf998-34c3-4a34-9404-ba7189648de8", "address": "fa:16:3e:00:89:b4", "network": {"id": "97fee1f8-09a7-4a1a-bca0-16b26a3c0207", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2039645983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a34358e29dde46139ee4aa5c8f57d0d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b7bf998-34", "ovs_interfaceid": "7b7bf998-34c3-4a34-9404-ba7189648de8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.424098] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Acquiring lock "refresh_cache-30e8b6ca-10fd-4e98-815d-1622f162b05c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.424280] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Acquired lock "refresh_cache-30e8b6ca-10fd-4e98-815d-1622f162b05c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.424440] env[62508]: DEBUG nova.network.neutron [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1479.616040] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5254ef5f-48f4-47b8-6e26-6947b214181b, 'name': SearchDatastore_Task, 'duration_secs': 0.013578} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.616458] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a92ee61f-ff26-47b2-bbf7-2e56a7c812a3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.623211] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for the task: (returnval){ [ 1479.623211] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d5a9ff-a2cf-6df6-0d51-7d41880ff319" [ 1479.623211] env[62508]: _type = "Task" [ 1479.623211] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.633747] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d5a9ff-a2cf-6df6-0d51-7d41880ff319, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.656208] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac5613ff-9610-41dd-8a54-8fcf28f1be23 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.665052] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-660124f6-2a12-4e4c-878b-656ee7480690 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Doing hard reboot of VM {{(pid=62508) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1479.665338] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-0bfd9036-6c31-4ab7-9565-64ff4bcba5c2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.673296] env[62508]: DEBUG oslo_vmware.api [None req-660124f6-2a12-4e4c-878b-656ee7480690 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1479.673296] env[62508]: value = "task-1775833" [ 1479.673296] env[62508]: _type = "Task" [ 1479.673296] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.682626] env[62508]: DEBUG oslo_vmware.api [None req-660124f6-2a12-4e4c-878b-656ee7480690 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775833, 'name': ResetVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.860161] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance f456dd83-6350-46b2-b06c-41dc5c477358 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1479.860336] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Migration 4fdfcd20-c840-423e-a6be-dfa3f61e5d01 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1479.860476] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1479.896244] env[62508]: DEBUG oslo_concurrency.lockutils [req-2b2750a7-75b7-4b2e-95a1-1472fff855f2 req-8e8ec340-90d6-4cc6-aa78-ec0c0d561a2d service nova] Releasing lock "refresh_cache-879f1e09-8b21-4f89-bc00-04e3d6710662" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1479.965423] env[62508]: DEBUG nova.network.neutron [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1480.138041] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d5a9ff-a2cf-6df6-0d51-7d41880ff319, 'name': SearchDatastore_Task, 'duration_secs': 0.016646} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.138398] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.138741] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 2b166aa9-9381-42c0-a607-7d610f08a4e3/2b166aa9-9381-42c0-a607-7d610f08a4e3.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1480.140298] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b7eb7ca9-4efc-49d8-ac64-a98ccc967835 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.152605] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for the task: (returnval){ [ 1480.152605] env[62508]: value = "task-1775834" [ 1480.152605] env[62508]: _type = "Task" [ 1480.152605] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.153623] env[62508]: DEBUG nova.network.neutron [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Updating instance_info_cache with network_info: [{"id": "f3739879-1b13-47e9-bec2-609738a10c75", "address": "fa:16:3e:ce:48:b9", "network": {"id": "601a5164-8180-4a0c-8fdd-9dc7e3327947", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-238276486-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fbe87622c6445a685055d5cfccceaa1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55bd18a7-39a8-4d07-9088-9b944f9ff710", "external-id": "nsx-vlan-transportzone-686", "segmentation_id": 686, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3739879-1b", "ovs_interfaceid": "f3739879-1b13-47e9-bec2-609738a10c75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.168268] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: 
{'id': task-1775834, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.185682] env[62508]: DEBUG oslo_vmware.api [None req-660124f6-2a12-4e4c-878b-656ee7480690 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775833, 'name': ResetVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.363830] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1480.665924] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Releasing lock "refresh_cache-30e8b6ca-10fd-4e98-815d-1622f162b05c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.666340] env[62508]: DEBUG nova.compute.manager [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Instance network_info: |[{"id": "f3739879-1b13-47e9-bec2-609738a10c75", "address": "fa:16:3e:ce:48:b9", "network": {"id": "601a5164-8180-4a0c-8fdd-9dc7e3327947", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-238276486-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fbe87622c6445a685055d5cfccceaa1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55bd18a7-39a8-4d07-9088-9b944f9ff710", "external-id": "nsx-vlan-transportzone-686", "segmentation_id": 686, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3739879-1b", "ovs_interfaceid": "f3739879-1b13-47e9-bec2-609738a10c75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1480.666873] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:48:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55bd18a7-39a8-4d07-9088-9b944f9ff710', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3739879-1b13-47e9-bec2-609738a10c75', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1480.675495] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Creating folder: Project (7fbe87622c6445a685055d5cfccceaa1). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1480.676598] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a86e4a50-584d-4793-b2e8-825c10b380ec {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.689151] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775834, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.697487] env[62508]: DEBUG oslo_vmware.api [None req-660124f6-2a12-4e4c-878b-656ee7480690 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775833, 'name': ResetVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.703730] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Created folder: Project (7fbe87622c6445a685055d5cfccceaa1) in parent group-v368536. [ 1480.703730] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Creating folder: Instances. Parent ref: group-v368679. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1480.703730] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-238381e5-e0fb-47c2-b47e-1f02c31ec4c9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.715969] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Created folder: Instances in parent group-v368679. [ 1480.715969] env[62508]: DEBUG oslo.service.loopingcall [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1480.720177] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1480.720755] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-672e0251-01c5-4bbf-aa48-3beb9b477381 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.744017] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1480.744017] env[62508]: value = "task-1775837" [ 1480.744017] env[62508]: _type = "Task" [ 1480.744017] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.757325] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775837, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.867180] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 6ae078f6-3b96-4b49-b282-cae74d742c97 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1480.867603] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1480.867680] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3840MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1480.877567] env[62508]: DEBUG nova.compute.manager [req-6dc9e479-df29-45d7-8340-f89259371c04 req-ab0a1154-ad37-4d76-a651-95f41b96fc50 service nova] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Received event network-changed-f3739879-1b13-47e9-bec2-609738a10c75 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1480.877660] env[62508]: DEBUG nova.compute.manager [req-6dc9e479-df29-45d7-8340-f89259371c04 req-ab0a1154-ad37-4d76-a651-95f41b96fc50 service nova] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Refreshing instance network info cache due to event network-changed-f3739879-1b13-47e9-bec2-609738a10c75. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1480.877895] env[62508]: DEBUG oslo_concurrency.lockutils [req-6dc9e479-df29-45d7-8340-f89259371c04 req-ab0a1154-ad37-4d76-a651-95f41b96fc50 service nova] Acquiring lock "refresh_cache-30e8b6ca-10fd-4e98-815d-1622f162b05c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.878069] env[62508]: DEBUG oslo_concurrency.lockutils [req-6dc9e479-df29-45d7-8340-f89259371c04 req-ab0a1154-ad37-4d76-a651-95f41b96fc50 service nova] Acquired lock "refresh_cache-30e8b6ca-10fd-4e98-815d-1622f162b05c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1480.878281] env[62508]: DEBUG nova.network.neutron [req-6dc9e479-df29-45d7-8340-f89259371c04 req-ab0a1154-ad37-4d76-a651-95f41b96fc50 service nova] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Refreshing network info cache for port f3739879-1b13-47e9-bec2-609738a10c75 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1481.180379] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775834, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.195943] env[62508]: DEBUG oslo_vmware.api [None req-660124f6-2a12-4e4c-878b-656ee7480690 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775833, 'name': ResetVM_Task, 'duration_secs': 1.106431} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.196269] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-660124f6-2a12-4e4c-878b-656ee7480690 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Did hard reboot of VM {{(pid=62508) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1481.196489] env[62508]: DEBUG nova.compute.manager [None req-660124f6-2a12-4e4c-878b-656ee7480690 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1481.197351] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec649a4-ed30-4142-a1ec-f46ee8d11e75 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.259411] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775837, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.440331] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006a491a-bcf4-403e-a4f9-2158711862b4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.448544] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b649d8-4ee6-4892-8a1e-eb5617ecfba8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.491509] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e343ef-d770-4345-a814-40b5251b86fa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.500426] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23041328-63d8-4012-b14a-6fde4eec5592 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.518710] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1481.678890] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775834, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.316895} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.679212] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 2b166aa9-9381-42c0-a607-7d610f08a4e3/2b166aa9-9381-42c0-a607-7d610f08a4e3.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1481.679866] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1481.679866] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ff596985-a39d-4b38-b604-a8f37155df5b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.689183] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for the task: (returnval){ [ 1481.689183] env[62508]: value = "task-1775838" [ 1481.689183] env[62508]: _type = "Task" [ 1481.689183] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.702685] env[62508]: DEBUG nova.network.neutron [req-6dc9e479-df29-45d7-8340-f89259371c04 req-ab0a1154-ad37-4d76-a651-95f41b96fc50 service nova] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Updated VIF entry in instance network info cache for port f3739879-1b13-47e9-bec2-609738a10c75. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1481.703723] env[62508]: DEBUG nova.network.neutron [req-6dc9e479-df29-45d7-8340-f89259371c04 req-ab0a1154-ad37-4d76-a651-95f41b96fc50 service nova] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Updating instance_info_cache with network_info: [{"id": "f3739879-1b13-47e9-bec2-609738a10c75", "address": "fa:16:3e:ce:48:b9", "network": {"id": "601a5164-8180-4a0c-8fdd-9dc7e3327947", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-238276486-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fbe87622c6445a685055d5cfccceaa1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55bd18a7-39a8-4d07-9088-9b944f9ff710", "external-id": "nsx-vlan-transportzone-686", "segmentation_id": 686, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3739879-1b", "ovs_interfaceid": "f3739879-1b13-47e9-bec2-609738a10c75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1481.704564] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775838, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.713151] env[62508]: DEBUG oslo_concurrency.lockutils [None req-660124f6-2a12-4e4c-878b-656ee7480690 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "879f1e09-8b21-4f89-bc00-04e3d6710662" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 5.005s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1481.760456] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775837, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.023695] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1482.082497] env[62508]: DEBUG nova.compute.manager [req-3d15865c-c39c-4946-acf0-849f712ea33a req-8e8ffc2a-c183-490f-8b04-6a6a6c96ade0 service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Received event network-changed-7b7bf998-34c3-4a34-9404-ba7189648de8 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1482.082732] env[62508]: DEBUG nova.compute.manager [req-3d15865c-c39c-4946-acf0-849f712ea33a req-8e8ffc2a-c183-490f-8b04-6a6a6c96ade0 service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Refreshing instance network info cache due to event network-changed-7b7bf998-34c3-4a34-9404-ba7189648de8. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1482.082947] env[62508]: DEBUG oslo_concurrency.lockutils [req-3d15865c-c39c-4946-acf0-849f712ea33a req-8e8ffc2a-c183-490f-8b04-6a6a6c96ade0 service nova] Acquiring lock "refresh_cache-879f1e09-8b21-4f89-bc00-04e3d6710662" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1482.084083] env[62508]: DEBUG oslo_concurrency.lockutils [req-3d15865c-c39c-4946-acf0-849f712ea33a req-8e8ffc2a-c183-490f-8b04-6a6a6c96ade0 service nova] Acquired lock "refresh_cache-879f1e09-8b21-4f89-bc00-04e3d6710662" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1482.084357] env[62508]: DEBUG nova.network.neutron [req-3d15865c-c39c-4946-acf0-849f712ea33a req-8e8ffc2a-c183-490f-8b04-6a6a6c96ade0 service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Refreshing network info cache for port 7b7bf998-34c3-4a34-9404-ba7189648de8 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1482.090113] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Acquiring lock "fd658703-d477-4d21-b0ad-7ff08d4c2f97" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1482.090113] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Lock "fd658703-d477-4d21-b0ad-7ff08d4c2f97" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1482.201945] env[62508]: DEBUG 
oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775838, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076421} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.202238] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1482.203144] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f9c001-6d95-4ee5-9d50-7b195db29f17 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.206041] env[62508]: DEBUG oslo_concurrency.lockutils [req-6dc9e479-df29-45d7-8340-f89259371c04 req-ab0a1154-ad37-4d76-a651-95f41b96fc50 service nova] Releasing lock "refresh_cache-30e8b6ca-10fd-4e98-815d-1622f162b05c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1482.232568] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 2b166aa9-9381-42c0-a607-7d610f08a4e3/2b166aa9-9381-42c0-a607-7d610f08a4e3.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1482.234283] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46a416df-5b53-441f-80f6-404576484e88 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.260859] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775837, 'name': CreateVM_Task, 'duration_secs': 1.406283} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.263607] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1482.263807] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for the task: (returnval){ [ 1482.263807] env[62508]: value = "task-1775839" [ 1482.263807] env[62508]: _type = "Task" [ 1482.263807] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.266622] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1482.266785] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1482.267330] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1482.267872] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65d4aee7-9c5e-4ca3-9feb-4af5c4df6533 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.278354] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Waiting for the task: (returnval){ [ 1482.278354] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5263eb67-7c1a-3f44-2860-3f0b0be1633a" [ 1482.278354] env[62508]: _type = "Task" [ 1482.278354] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.282684] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775839, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.293184] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5263eb67-7c1a-3f44-2860-3f0b0be1633a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.529361] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1482.529692] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.770s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.530048] env[62508]: DEBUG oslo_concurrency.lockutils [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.178s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1482.531711] env[62508]: INFO nova.compute.claims [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1482.769770] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquiring lock "879f1e09-8b21-4f89-bc00-04e3d6710662" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1482.770275] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "879f1e09-8b21-4f89-bc00-04e3d6710662" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1482.770275] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquiring lock "879f1e09-8b21-4f89-bc00-04e3d6710662-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1482.770542] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "879f1e09-8b21-4f89-bc00-04e3d6710662-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1482.770822] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 
tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "879f1e09-8b21-4f89-bc00-04e3d6710662-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.780062] env[62508]: INFO nova.compute.manager [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Terminating instance [ 1482.785023] env[62508]: DEBUG nova.compute.manager [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1482.785023] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1482.785023] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025fe4fb-09c0-434a-818f-f1dc45669de1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.793757] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775839, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.802391] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1482.806833] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0f83488a-5822-4816-8cae-a2136b7ae411 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.808962] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5263eb67-7c1a-3f44-2860-3f0b0be1633a, 'name': SearchDatastore_Task, 'duration_secs': 0.017285} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.809313] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1482.809541] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1482.809774] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1482.809954] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1482.810169] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1482.811054] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69c6a3b6-5072-4eea-98e6-fcdb24b81740 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.816539] env[62508]: DEBUG oslo_vmware.api [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1482.816539] env[62508]: value = "task-1775840" [ 1482.816539] env[62508]: _type = "Task" [ 1482.816539] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.823429] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1482.823658] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1482.824680] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4aed0835-604c-4b8f-9039-4f763f007bce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.831139] env[62508]: DEBUG oslo_vmware.api [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775840, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.835724] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Waiting for the task: (returnval){ [ 1482.835724] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523a89b8-3867-60b0-c992-3f14f1cb16f8" [ 1482.835724] env[62508]: _type = "Task" [ 1482.835724] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.840830] env[62508]: DEBUG nova.network.neutron [req-3d15865c-c39c-4946-acf0-849f712ea33a req-8e8ffc2a-c183-490f-8b04-6a6a6c96ade0 service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Updated VIF entry in instance network info cache for port 7b7bf998-34c3-4a34-9404-ba7189648de8. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1482.841503] env[62508]: DEBUG nova.network.neutron [req-3d15865c-c39c-4946-acf0-849f712ea33a req-8e8ffc2a-c183-490f-8b04-6a6a6c96ade0 service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Updating instance_info_cache with network_info: [{"id": "7b7bf998-34c3-4a34-9404-ba7189648de8", "address": "fa:16:3e:00:89:b4", "network": {"id": "97fee1f8-09a7-4a1a-bca0-16b26a3c0207", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2039645983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a34358e29dde46139ee4aa5c8f57d0d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b7bf998-34", "ovs_interfaceid": "7b7bf998-34c3-4a34-9404-ba7189648de8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1482.850077] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523a89b8-3867-60b0-c992-3f14f1cb16f8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.283578] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775839, 'name': ReconfigVM_Task, 'duration_secs': 0.568751} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.283578] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 2b166aa9-9381-42c0-a607-7d610f08a4e3/2b166aa9-9381-42c0-a607-7d610f08a4e3.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1483.283578] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-00a790a0-7427-4698-ba9b-5a5222d854f7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.290729] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for the task: (returnval){ [ 1483.290729] env[62508]: value = "task-1775841" [ 1483.290729] env[62508]: _type = "Task" [ 1483.290729] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.300125] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775841, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.328281] env[62508]: DEBUG oslo_vmware.api [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775840, 'name': PowerOffVM_Task, 'duration_secs': 0.215789} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.328609] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1483.328740] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1483.329011] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-356b1cdf-ac10-4ae7-843b-d3408ad2406b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.350304] env[62508]: DEBUG oslo_concurrency.lockutils [req-3d15865c-c39c-4946-acf0-849f712ea33a req-8e8ffc2a-c183-490f-8b04-6a6a6c96ade0 service nova] Releasing lock "refresh_cache-879f1e09-8b21-4f89-bc00-04e3d6710662" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1483.350759] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523a89b8-3867-60b0-c992-3f14f1cb16f8, 'name': SearchDatastore_Task, 'duration_secs': 0.016146} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.351585] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5dcb0f45-f88b-47c6-a706-435e90dbd921 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.362807] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Waiting for the task: (returnval){ [ 1483.362807] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52671878-df90-95a7-c825-0e58a9aa2429" [ 1483.362807] env[62508]: _type = "Task" [ 1483.362807] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.373254] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52671878-df90-95a7-c825-0e58a9aa2429, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.415076] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1483.415320] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1483.415513] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Deleting the datastore file [datastore1] 879f1e09-8b21-4f89-bc00-04e3d6710662 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1483.415802] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f252a91-9b5d-411f-b03c-a91ad76cb428 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.423948] env[62508]: DEBUG oslo_vmware.api [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1483.423948] env[62508]: value = "task-1775843" [ 1483.423948] env[62508]: _type = "Task" [ 1483.423948] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.433964] env[62508]: DEBUG oslo_vmware.api [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775843, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.804904] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775841, 'name': Rename_Task, 'duration_secs': 0.201594} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.804904] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1483.805268] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-39558297-b4f2-4268-847b-5b2092de9c13 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.813584] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for the task: (returnval){ [ 1483.813584] env[62508]: value = "task-1775844" [ 1483.813584] env[62508]: _type = "Task" [ 1483.813584] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.825746] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775844, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.876121] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52671878-df90-95a7-c825-0e58a9aa2429, 'name': SearchDatastore_Task, 'duration_secs': 0.013453} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.876421] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1483.876680] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 30e8b6ca-10fd-4e98-815d-1622f162b05c/30e8b6ca-10fd-4e98-815d-1622f162b05c.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1483.876969] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3b33822f-bdf7-45e5-a403-b73c52b682ba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.886259] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Waiting for the task: (returnval){ [ 1483.886259] env[62508]: value = "task-1775845" [ 1483.886259] env[62508]: _type = "Task" [ 1483.886259] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.898071] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': task-1775845, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.936891] env[62508]: DEBUG oslo_vmware.api [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775843, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163525} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.937184] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1483.937397] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1483.937664] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1483.937763] env[62508]: INFO nova.compute.manager [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1483.938073] env[62508]: DEBUG oslo.service.loopingcall [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1483.940568] env[62508]: DEBUG nova.compute.manager [-] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1483.940727] env[62508]: DEBUG nova.network.neutron [-] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1484.030583] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-272b36c3-0729-4c78-9f79-b81dfab12b74 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.039611] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c602b1d-7b29-43e9-8559-21889b86508b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.080899] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5b4a75-e030-4de5-af48-1108299db562 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.090053] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8b6d35-4969-4efc-9d0e-c8c41832482b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.107438] env[62508]: DEBUG nova.compute.provider_tree [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1484.326692] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775844, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.332931] env[62508]: DEBUG nova.compute.manager [req-b3e41696-073f-4eae-b7ba-f6bc68c375fb req-3f85b85c-42a6-413a-b284-3bb6cb7a01bb service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Received event network-vif-deleted-7b7bf998-34c3-4a34-9404-ba7189648de8 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1484.333226] env[62508]: INFO nova.compute.manager [req-b3e41696-073f-4eae-b7ba-f6bc68c375fb req-3f85b85c-42a6-413a-b284-3bb6cb7a01bb service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Neutron deleted interface 7b7bf998-34c3-4a34-9404-ba7189648de8; detaching it from the instance and deleting it from the info cache [ 1484.333459] env[62508]: DEBUG nova.network.neutron [req-b3e41696-073f-4eae-b7ba-f6bc68c375fb req-3f85b85c-42a6-413a-b284-3bb6cb7a01bb service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1484.397879] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': task-1775845, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.611839] env[62508]: DEBUG nova.scheduler.client.report [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1484.752442] env[62508]: DEBUG nova.network.neutron [-] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1484.827371] env[62508]: DEBUG oslo_vmware.api [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775844, 'name': PowerOnVM_Task, 'duration_secs': 0.763828} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.827960] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1484.828180] env[62508]: INFO nova.compute.manager [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Took 14.17 seconds to spawn the instance on the hypervisor. [ 1484.828365] env[62508]: DEBUG nova.compute.manager [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1484.829275] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a8769e2-59cb-4277-9b9a-3a2da38de6f4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.838525] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5a2a979f-1446-4704-84de-f2432045ec32 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.848891] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f4f74e2-e876-4231-b7ba-10761edc485a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.889653] env[62508]: DEBUG nova.compute.manager [req-b3e41696-073f-4eae-b7ba-f6bc68c375fb req-3f85b85c-42a6-413a-b284-3bb6cb7a01bb service nova] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Detach interface failed, port_id=7b7bf998-34c3-4a34-9404-ba7189648de8, reason: Instance 879f1e09-8b21-4f89-bc00-04e3d6710662 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1484.902248] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': task-1775845, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529488} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.902248] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 30e8b6ca-10fd-4e98-815d-1622f162b05c/30e8b6ca-10fd-4e98-815d-1622f162b05c.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1484.902524] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1484.902783] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4dc67630-f257-47cf-90f7-039d6c1322f5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.910628] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Waiting for the task: (returnval){ [ 1484.910628] env[62508]: value = "task-1775846" [ 1484.910628] env[62508]: _type = "Task" [ 1484.910628] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.919732] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': task-1775846, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.117736] env[62508]: DEBUG oslo_concurrency.lockutils [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.588s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.118305] env[62508]: DEBUG nova.compute.manager [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1485.121316] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.462s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1485.121564] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.123600] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.774s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1485.123746] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.126344] env[62508]: DEBUG oslo_concurrency.lockutils [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.355s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1485.127714] env[62508]: INFO nova.compute.claims [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1485.153423] env[62508]: INFO nova.scheduler.client.report [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Deleted allocations for instance b911f25d-711b-411e-bb2d-2e59386ff2ea [ 1485.158541] env[62508]: INFO nova.scheduler.client.report [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Deleted allocations for instance 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5 [ 1485.255730] env[62508]: INFO nova.compute.manager [-] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Took 1.31 seconds to deallocate network for instance. 
[ 1485.348425] env[62508]: INFO nova.compute.manager [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Took 43.02 seconds to build instance. [ 1485.421718] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': task-1775846, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082498} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.422014] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1485.422840] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e904e296-0a92-4c43-85f8-93b60399392a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.448017] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 30e8b6ca-10fd-4e98-815d-1622f162b05c/30e8b6ca-10fd-4e98-815d-1622f162b05c.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1485.448343] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7087bab7-7840-49f8-9abb-35b56fdc4a92 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.469060] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Waiting for the task: (returnval){ [ 1485.469060] env[62508]: value = "task-1775847" [ 1485.469060] env[62508]: _type = "Task" [ 1485.469060] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.478207] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': task-1775847, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.635351] env[62508]: DEBUG nova.compute.utils [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1485.636579] env[62508]: DEBUG nova.compute.manager [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1485.636749] env[62508]: DEBUG nova.network.neutron [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1485.660965] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9fba7351-a4ff-41a1-9ace-060655b7e8e3 tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lock "b911f25d-711b-411e-bb2d-2e59386ff2ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.092s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.666038] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bfdf4ecf-a5be-4801-9f97-8e7ae85920ff tempest-ServerMetadataNegativeTestJSON-487385805 tempest-ServerMetadataNegativeTestJSON-487385805-project-member] Lock "2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.396s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.701630] env[62508]: DEBUG nova.policy [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4e5f76e5d1e42838eda29c2c110c17f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '51842f3e9f83452789923afbafd40bc4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1485.762361] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1485.852255] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c64c1c87-ec4f-4280-a798-e827d44d8da1 tempest-ServersTestMultiNic-168775600 
tempest-ServersTestMultiNic-168775600-project-member] Lock "2b166aa9-9381-42c0-a607-7d610f08a4e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.685s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.980481] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': task-1775847, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.065618] env[62508]: DEBUG nova.network.neutron [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Successfully created port: 971a1f15-0acf-4dbe-a120-b172a0eb9552 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1486.103180] env[62508]: DEBUG oslo_vmware.rw_handles [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c40c21-2bfd-e2a0-c9c2-ba1c3bd33b40/disk-0.vmdk. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1486.104656] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b279ab-d5ca-4ed8-b58d-ec5b75eadb44 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.113439] env[62508]: DEBUG oslo_vmware.rw_handles [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c40c21-2bfd-e2a0-c9c2-ba1c3bd33b40/disk-0.vmdk is in state: ready. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1486.113648] env[62508]: ERROR oslo_vmware.rw_handles [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c40c21-2bfd-e2a0-c9c2-ba1c3bd33b40/disk-0.vmdk due to incomplete transfer. [ 1486.113931] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5940d07f-3da2-4d4d-8de7-394759c08353 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.123548] env[62508]: DEBUG oslo_vmware.rw_handles [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c40c21-2bfd-e2a0-c9c2-ba1c3bd33b40/disk-0.vmdk. 
{{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1486.123832] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Uploaded image 0516a45f-0a2d-4d7a-b7b5-26b1d9a35082 to the Glance image server {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1486.126179] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Destroying the VM {{(pid=62508) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1486.126448] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-af61a624-06b7-4ea3-859d-1f5b08a6f2c1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.134051] env[62508]: DEBUG oslo_vmware.api [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1486.134051] env[62508]: value = "task-1775848" [ 1486.134051] env[62508]: _type = "Task" [ 1486.134051] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.145940] env[62508]: DEBUG nova.compute.utils [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1486.155550] env[62508]: DEBUG oslo_vmware.api [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775848, 'name': Destroy_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.278011] env[62508]: DEBUG oslo_concurrency.lockutils [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquiring lock "2b166aa9-9381-42c0-a607-7d610f08a4e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1486.278293] env[62508]: DEBUG oslo_concurrency.lockutils [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Lock "2b166aa9-9381-42c0-a607-7d610f08a4e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.278532] env[62508]: DEBUG oslo_concurrency.lockutils [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquiring lock "2b166aa9-9381-42c0-a607-7d610f08a4e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1486.278723] env[62508]: DEBUG oslo_concurrency.lockutils [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Lock "2b166aa9-9381-42c0-a607-7d610f08a4e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.278896] env[62508]: DEBUG oslo_concurrency.lockutils [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Lock "2b166aa9-9381-42c0-a607-7d610f08a4e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1486.281149] env[62508]: INFO nova.compute.manager [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Terminating instance [ 1486.283113] env[62508]: DEBUG nova.compute.manager [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1486.283353] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1486.284664] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990fc11e-9ee6-46b1-9d67-06414f9982ab {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.297366] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1486.297493] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2f9fb15-6cc4-4afb-878c-66302ae505dc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.312818] env[62508]: DEBUG oslo_vmware.api [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for the task: (returnval){ [ 1486.312818] env[62508]: value = "task-1775849" [ 1486.312818] env[62508]: _type = "Task" [ 1486.312818] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.323687] env[62508]: DEBUG oslo_vmware.api [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775849, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.354054] env[62508]: DEBUG nova.compute.manager [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1486.483392] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': task-1775847, 'name': ReconfigVM_Task, 'duration_secs': 0.576093} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.483754] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 30e8b6ca-10fd-4e98-815d-1622f162b05c/30e8b6ca-10fd-4e98-815d-1622f162b05c.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1486.484485] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-83f3716f-a00e-4184-b476-46bb7ceffe0d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.492885] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Waiting for the task: (returnval){ [ 1486.492885] env[62508]: value = "task-1775850" [ 1486.492885] env[62508]: _type = "Task" [ 1486.492885] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.505834] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': task-1775850, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.653032] env[62508]: DEBUG nova.compute.manager [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1486.662476] env[62508]: DEBUG oslo_vmware.api [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775848, 'name': Destroy_Task, 'duration_secs': 0.396737} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.663141] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Destroyed the VM [ 1486.663484] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Deleting Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1486.663808] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4685316f-39aa-4513-a295-1cca5a87c6ab {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.671976] env[62508]: DEBUG oslo_vmware.api [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1486.671976] env[62508]: value = "task-1775851" [ 1486.671976] env[62508]: _type = "Task" [ 1486.671976] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.689925] env[62508]: DEBUG oslo_vmware.api [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775851, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.729564] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ae1c9c-50a4-4c25-9edf-053f169c2d7a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.738827] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb01b13-f84b-4d7e-932e-c8207bac2db6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.776107] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2881301e-e08d-4bf0-a2ac-942456ae06bd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.784541] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a848d31-db74-4ce5-a321-664779826631 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.799816] env[62508]: DEBUG nova.compute.provider_tree [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1486.824038] env[62508]: DEBUG oslo_vmware.api [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775849, 
'name': PowerOffVM_Task, 'duration_secs': 0.237281} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.824038] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1486.824207] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1486.824370] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b85a63b-3e4d-4545-aa24-7fb523ced60b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.876291] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1486.971747] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1486.971994] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1486.972818] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Deleting the datastore file [datastore1] 2b166aa9-9381-42c0-a607-7d610f08a4e3 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1486.972818] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-89786bd8-4a18-400e-be1d-b8c842cf1f6f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.981758] env[62508]: DEBUG oslo_vmware.api [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for the task: (returnval){ [ 1486.981758] env[62508]: value = "task-1775853" [ 1486.981758] env[62508]: _type = "Task" [ 1486.981758] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.998304] env[62508]: DEBUG oslo_vmware.api [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775853, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.006914] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': task-1775850, 'name': Rename_Task, 'duration_secs': 0.20082} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.007276] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1487.007533] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7844a08a-e152-4f7e-a9b5-778e1f4895b5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.018060] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Waiting for the task: (returnval){ [ 1487.018060] env[62508]: value = "task-1775854" [ 1487.018060] env[62508]: _type = "Task" [ 1487.018060] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.027269] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': task-1775854, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.183256] env[62508]: DEBUG oslo_vmware.api [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775851, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.303094] env[62508]: DEBUG nova.scheduler.client.report [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1487.497236] env[62508]: DEBUG oslo_vmware.api [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Task: {'id': task-1775853, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.31388} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.497678] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1487.497983] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1487.498309] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1487.498613] env[62508]: INFO nova.compute.manager [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1487.499009] env[62508]: DEBUG oslo.service.loopingcall [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1487.499994] env[62508]: DEBUG nova.compute.manager [-] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1487.499994] env[62508]: DEBUG nova.network.neutron [-] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1487.532012] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': task-1775854, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.670864] env[62508]: DEBUG nova.compute.manager [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1487.695214] env[62508]: DEBUG oslo_vmware.api [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775851, 'name': RemoveSnapshot_Task, 'duration_secs': 0.876608} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.695214] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Deleted Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1487.698018] env[62508]: INFO nova.compute.manager [None req-9820c9b2-90b9-4d0d-9c8b-aeaa2b7a383e tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Took 15.67 seconds to snapshot the instance on the hypervisor. 
[ 1487.712292] env[62508]: DEBUG nova.virt.hardware [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:08:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1486991589',id=24,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-691255233',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1487.712927] env[62508]: DEBUG nova.virt.hardware [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1487.716019] env[62508]: DEBUG nova.virt.hardware [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1487.716019] env[62508]: DEBUG nova.virt.hardware [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1487.716019] env[62508]: DEBUG nova.virt.hardware [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1487.716019] env[62508]: DEBUG nova.virt.hardware [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1487.716019] env[62508]: DEBUG nova.virt.hardware [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1487.716019] env[62508]: DEBUG nova.virt.hardware [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1487.716019] env[62508]: DEBUG nova.virt.hardware [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1487.716019] env[62508]: DEBUG nova.virt.hardware [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1487.716019] env[62508]: DEBUG nova.virt.hardware [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1487.716019] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43cfded7-064c-47e5-9abe-044cc8b1c462 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.728132] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8428b02-d119-4d9d-b57a-4d2d1a3adfa5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.812888] env[62508]: DEBUG oslo_concurrency.lockutils [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.684s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.812888] env[62508]: DEBUG nova.compute.manager [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1487.815701] env[62508]: DEBUG oslo_concurrency.lockutils [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.369s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1487.817507] env[62508]: INFO nova.compute.claims [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1487.849334] env[62508]: DEBUG nova.compute.manager [req-048fa605-4b26-49e5-8ebc-07d29c7c0941 req-a08c3f97-ffbf-466a-b844-fbafd56a266f service nova] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Received event network-vif-plugged-971a1f15-0acf-4dbe-a120-b172a0eb9552 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1487.849593] env[62508]: DEBUG oslo_concurrency.lockutils [req-048fa605-4b26-49e5-8ebc-07d29c7c0941 req-a08c3f97-ffbf-466a-b844-fbafd56a266f service nova] Acquiring lock "e07ab22e-bd07-4232-abfe-c0617c0b9813-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1487.849984] env[62508]: DEBUG oslo_concurrency.lockutils [req-048fa605-4b26-49e5-8ebc-07d29c7c0941 req-a08c3f97-ffbf-466a-b844-fbafd56a266f service nova] Lock "e07ab22e-bd07-4232-abfe-c0617c0b9813-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1487.850246] env[62508]: DEBUG oslo_concurrency.lockutils [req-048fa605-4b26-49e5-8ebc-07d29c7c0941 req-a08c3f97-ffbf-466a-b844-fbafd56a266f service nova] Lock "e07ab22e-bd07-4232-abfe-c0617c0b9813-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.850516] env[62508]: DEBUG nova.compute.manager [req-048fa605-4b26-49e5-8ebc-07d29c7c0941 req-a08c3f97-ffbf-466a-b844-fbafd56a266f service nova] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] No waiting events found dispatching network-vif-plugged-971a1f15-0acf-4dbe-a120-b172a0eb9552 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1487.850785] env[62508]: WARNING nova.compute.manager [req-048fa605-4b26-49e5-8ebc-07d29c7c0941 req-a08c3f97-ffbf-466a-b844-fbafd56a266f service nova] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Received unexpected event network-vif-plugged-971a1f15-0acf-4dbe-a120-b172a0eb9552 for instance with vm_state building and task_state spawning. 
[ 1487.904939] env[62508]: DEBUG nova.network.neutron [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Successfully updated port: 971a1f15-0acf-4dbe-a120-b172a0eb9552 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1487.917851] env[62508]: DEBUG nova.compute.manager [req-29063a59-3c82-48a0-b1f6-65a11e994580 req-c264c35f-b57e-4e7e-aa0f-09b8c4b88d3d service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Received event network-vif-deleted-f89bf0c0-faa6-4104-ad18-7048847b0aa0 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1487.917851] env[62508]: INFO nova.compute.manager [req-29063a59-3c82-48a0-b1f6-65a11e994580 req-c264c35f-b57e-4e7e-aa0f-09b8c4b88d3d service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Neutron deleted interface f89bf0c0-faa6-4104-ad18-7048847b0aa0; detaching it from the instance and deleting it from the info cache [ 1487.918166] env[62508]: DEBUG nova.network.neutron [req-29063a59-3c82-48a0-b1f6-65a11e994580 req-c264c35f-b57e-4e7e-aa0f-09b8c4b88d3d service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Updating instance_info_cache with network_info: [{"id": "ed7e3fd8-f819-44c2-8370-860c27d95f6b", "address": "fa:16:3e:2b:86:68", "network": {"id": "cf10465e-6cd6-49b9-be6d-404c9cedbb35", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-734304522", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.18", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae1e8f147289480aa4ecab1500a0e3cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped7e3fd8-f8", "ovs_interfaceid": "ed7e3fd8-f819-44c2-8370-860c27d95f6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1488.030625] env[62508]: DEBUG oslo_vmware.api [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': task-1775854, 'name': PowerOnVM_Task, 'duration_secs': 0.76139} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.030625] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1488.030625] env[62508]: INFO nova.compute.manager [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Took 10.19 seconds to spawn the instance on the hypervisor. [ 1488.030625] env[62508]: DEBUG nova.compute.manager [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1488.030907] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cbfdc51-919a-403a-9639-90bb1a6b97ef {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.325130] env[62508]: DEBUG nova.compute.utils [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1488.328961] env[62508]: DEBUG nova.compute.manager [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1488.329168] env[62508]: DEBUG nova.network.neutron [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1488.389453] env[62508]: DEBUG nova.policy [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '49f28fa1ac51489291ee44ac8e09ca51', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b2b18b2f1d6d439382b9e0fc89b2a3aa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1488.409011] env[62508]: DEBUG oslo_concurrency.lockutils [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquiring lock "refresh_cache-e07ab22e-bd07-4232-abfe-c0617c0b9813" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1488.409011] env[62508]: DEBUG oslo_concurrency.lockutils [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquired lock "refresh_cache-e07ab22e-bd07-4232-abfe-c0617c0b9813" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.409011] env[62508]: DEBUG nova.network.neutron [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1488.425144] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-af154501-0760-4c23-9620-b4b0d9066928 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.438753] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9feed506-8e19-4662-b2a3-7ef058c36afb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.477209] env[62508]: DEBUG nova.compute.manager [req-29063a59-3c82-48a0-b1f6-65a11e994580 req-c264c35f-b57e-4e7e-aa0f-09b8c4b88d3d service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Detach interface failed, port_id=f89bf0c0-faa6-4104-ad18-7048847b0aa0, reason: Instance 2b166aa9-9381-42c0-a607-7d610f08a4e3 could not be found. 
{{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1488.548292] env[62508]: INFO nova.compute.manager [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Took 41.45 seconds to build instance. [ 1488.574032] env[62508]: DEBUG nova.network.neutron [-] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1488.722935] env[62508]: DEBUG nova.network.neutron [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Successfully created port: d993966a-9d82-49a0-9e8a-f1835407ecad {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1488.833018] env[62508]: DEBUG nova.compute.manager [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1488.977149] env[62508]: DEBUG nova.network.neutron [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1489.050316] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f50a8081-356c-481d-90c7-990a6f5b5644 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Lock "30e8b6ca-10fd-4e98-815d-1622f162b05c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.966s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.076783] env[62508]: INFO nova.compute.manager [-] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Took 1.58 seconds to deallocate network for instance. 
[ 1489.269721] env[62508]: DEBUG nova.network.neutron [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Updating instance_info_cache with network_info: [{"id": "971a1f15-0acf-4dbe-a120-b172a0eb9552", "address": "fa:16:3e:bc:21:9f", "network": {"id": "2e10157c-8592-41c4-8540-9696923dc9ca", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1194878546-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "51842f3e9f83452789923afbafd40bc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f52a458-d157-48a3-b4e2-b8cc0779afe2", "external-id": "nsx-vlan-transportzone-403", "segmentation_id": 403, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap971a1f15-0a", "ovs_interfaceid": "971a1f15-0acf-4dbe-a120-b172a0eb9552", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.355322] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d15fa68-a711-430a-bd97-81805582df25 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.363771] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f152566f-f12f-4bc2-9aac-01b37cbe5c45 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.400121] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4cc138-4359-4a9a-a33e-bc106fc03081 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.409356] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-777049b2-68e4-433c-b336-b5943573cc61 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.425444] env[62508]: DEBUG nova.compute.provider_tree [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1489.503766] env[62508]: DEBUG nova.compute.manager [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1489.504719] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c6c51909-a3fd-4821-8b8a-47f0076eefce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.557915] env[62508]: DEBUG nova.compute.manager [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1489.587684] env[62508]: DEBUG oslo_concurrency.lockutils [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.771984] env[62508]: DEBUG oslo_concurrency.lockutils [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Releasing lock "refresh_cache-e07ab22e-bd07-4232-abfe-c0617c0b9813" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1489.772356] env[62508]: DEBUG nova.compute.manager [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Instance network_info: |[{"id": "971a1f15-0acf-4dbe-a120-b172a0eb9552", "address": "fa:16:3e:bc:21:9f", "network": {"id": "2e10157c-8592-41c4-8540-9696923dc9ca", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1194878546-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "51842f3e9f83452789923afbafd40bc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f52a458-d157-48a3-b4e2-b8cc0779afe2", "external-id": "nsx-vlan-transportzone-403", "segmentation_id": 403, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap971a1f15-0a", "ovs_interfaceid": "971a1f15-0acf-4dbe-a120-b172a0eb9552", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1489.772931] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:21:9f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f52a458-d157-48a3-b4e2-b8cc0779afe2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '971a1f15-0acf-4dbe-a120-b172a0eb9552', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1489.781470] env[62508]: DEBUG oslo.service.loopingcall [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1489.781745] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1489.782009] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-93b97218-5852-4dbb-9e92-911b7ce17673 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.804707] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1489.804707] env[62508]: value = "task-1775855" [ 1489.804707] env[62508]: _type = "Task" [ 1489.804707] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.815246] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775855, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.845217] env[62508]: DEBUG nova.compute.manager [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1489.873817] env[62508]: DEBUG nova.virt.hardware [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1489.873817] env[62508]: DEBUG nova.virt.hardware [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1489.873817] env[62508]: DEBUG nova.virt.hardware [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1489.873817] env[62508]: DEBUG nova.virt.hardware [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1489.873817] env[62508]: DEBUG nova.virt.hardware [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1489.874202] env[62508]: DEBUG nova.virt.hardware [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1489.874202] env[62508]: DEBUG nova.virt.hardware [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1489.874296] env[62508]: DEBUG nova.virt.hardware [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1489.874463] env[62508]: DEBUG nova.virt.hardware [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1489.874643] env[62508]: DEBUG nova.virt.hardware [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1489.874824] env[62508]: DEBUG nova.virt.hardware [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1489.875741] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc150494-01f6-47c7-b46c-7d67915462c6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.884254] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62068b0e-bf0f-4ef0-a2a9-e8a279f9ea9c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.890632] env[62508]: DEBUG nova.compute.manager [req-547e6cdb-36aa-4ddf-a24f-6ec540f963bc req-4dca8cc0-61eb-42ec-b4d3-d892ce86bb32 service nova] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Received event network-changed-971a1f15-0acf-4dbe-a120-b172a0eb9552 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1489.890878] env[62508]: DEBUG nova.compute.manager [req-547e6cdb-36aa-4ddf-a24f-6ec540f963bc req-4dca8cc0-61eb-42ec-b4d3-d892ce86bb32 service nova] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Refreshing instance network info cache due to event network-changed-971a1f15-0acf-4dbe-a120-b172a0eb9552. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1489.891117] env[62508]: DEBUG oslo_concurrency.lockutils [req-547e6cdb-36aa-4ddf-a24f-6ec540f963bc req-4dca8cc0-61eb-42ec-b4d3-d892ce86bb32 service nova] Acquiring lock "refresh_cache-e07ab22e-bd07-4232-abfe-c0617c0b9813" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1489.891319] env[62508]: DEBUG oslo_concurrency.lockutils [req-547e6cdb-36aa-4ddf-a24f-6ec540f963bc req-4dca8cc0-61eb-42ec-b4d3-d892ce86bb32 service nova] Acquired lock "refresh_cache-e07ab22e-bd07-4232-abfe-c0617c0b9813" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1489.891447] env[62508]: DEBUG nova.network.neutron [req-547e6cdb-36aa-4ddf-a24f-6ec540f963bc req-4dca8cc0-61eb-42ec-b4d3-d892ce86bb32 service nova] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Refreshing network info cache for port 971a1f15-0acf-4dbe-a120-b172a0eb9552 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1489.930832] env[62508]: DEBUG nova.scheduler.client.report [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1489.989373] env[62508]: DEBUG nova.compute.manager [req-402d0183-442b-4a7f-b14e-3700608d0aa2 req-c5c4a8b1-de11-4103-86e8-3d191417b1b0 service nova] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Received event network-vif-deleted-ed7e3fd8-f819-44c2-8370-860c27d95f6b {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1490.016768] env[62508]: INFO nova.compute.manager [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] instance snapshotting [ 1490.021121] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c913c71b-bf0f-4ad3-a53c-11b4493534bb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.041610] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02be5a31-f427-4953-8d9f-72c65f05a07b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.083944] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.317552] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775855, 
'name': CreateVM_Task, 'duration_secs': 0.405467} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.317782] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1490.318585] env[62508]: DEBUG oslo_concurrency.lockutils [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.318929] env[62508]: DEBUG oslo_concurrency.lockutils [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.319383] env[62508]: DEBUG oslo_concurrency.lockutils [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1490.319457] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93092a44-9100-49b6-8308-582cb44edff9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.325108] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1490.325108] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f78b66-4185-69a0-b704-b90cfbc7522a" [ 1490.325108] env[62508]: _type = "Task" [ 1490.325108] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.340199] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f78b66-4185-69a0-b704-b90cfbc7522a, 'name': SearchDatastore_Task, 'duration_secs': 0.010641} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.343740] env[62508]: DEBUG oslo_concurrency.lockutils [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1490.343740] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1490.343740] env[62508]: DEBUG oslo_concurrency.lockutils [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.343740] env[62508]: DEBUG oslo_concurrency.lockutils [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.343740] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1490.343740] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45f0fe42-4cbe-4268-a1b2-f29e3d5109bd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.353019] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1490.353231] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1490.354576] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ae3785c-9ef1-4313-ac98-31b005a2f8e9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.360778] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1490.360778] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d60f95-cc3b-6bf1-6438-5bfd6fddca50" [ 1490.360778] env[62508]: _type = "Task" [ 1490.360778] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.374999] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d60f95-cc3b-6bf1-6438-5bfd6fddca50, 'name': SearchDatastore_Task, 'duration_secs': 0.010401} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.377106] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13ef64a3-c094-42e4-93ef-d1427cec8b0f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.384793] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1490.384793] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528c0598-3cca-0927-8264-5215b58e3727" [ 1490.384793] env[62508]: _type = "Task" [ 1490.384793] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.395791] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528c0598-3cca-0927-8264-5215b58e3727, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.434875] env[62508]: DEBUG oslo_concurrency.lockutils [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.619s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1490.435315] env[62508]: DEBUG nova.compute.manager [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1490.438391] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 30.195s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.438576] env[62508]: DEBUG nova.objects.instance [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62508) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1490.558300] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Creating Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1490.558300] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f1c4003a-f05e-4ef1-bb18-9faf771c84e7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.576405] env[62508]: DEBUG oslo_vmware.api [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1490.576405] env[62508]: value = "task-1775856" [ 1490.576405] env[62508]: _type = "Task" [ 1490.576405] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.591958] env[62508]: DEBUG oslo_vmware.api [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775856, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.592593] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e5ed8aa9-4df3-4d64-9b27-e368865add67 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "95a289ac-3178-45ea-80d2-905b9af54f3c" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.592946] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e5ed8aa9-4df3-4d64-9b27-e368865add67 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "95a289ac-3178-45ea-80d2-905b9af54f3c" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.593287] env[62508]: INFO nova.compute.manager [None req-e5ed8aa9-4df3-4d64-9b27-e368865add67 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Rebooting instance [ 1490.670727] env[62508]: DEBUG nova.network.neutron [req-547e6cdb-36aa-4ddf-a24f-6ec540f963bc req-4dca8cc0-61eb-42ec-b4d3-d892ce86bb32 service nova] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Updated VIF entry in instance network info cache for port 971a1f15-0acf-4dbe-a120-b172a0eb9552. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1490.670727] env[62508]: DEBUG nova.network.neutron [req-547e6cdb-36aa-4ddf-a24f-6ec540f963bc req-4dca8cc0-61eb-42ec-b4d3-d892ce86bb32 service nova] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Updating instance_info_cache with network_info: [{"id": "971a1f15-0acf-4dbe-a120-b172a0eb9552", "address": "fa:16:3e:bc:21:9f", "network": {"id": "2e10157c-8592-41c4-8540-9696923dc9ca", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1194878546-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "51842f3e9f83452789923afbafd40bc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f52a458-d157-48a3-b4e2-b8cc0779afe2", "external-id": "nsx-vlan-transportzone-403", "segmentation_id": 403, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap971a1f15-0a", "ovs_interfaceid": "971a1f15-0acf-4dbe-a120-b172a0eb9552", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1490.873206] env[62508]: DEBUG nova.network.neutron [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Successfully updated port: d993966a-9d82-49a0-9e8a-f1835407ecad {{(pid=62508) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 1490.899775] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528c0598-3cca-0927-8264-5215b58e3727, 'name': SearchDatastore_Task, 'duration_secs': 0.011572} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.900112] env[62508]: DEBUG oslo_concurrency.lockutils [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1490.900591] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] e07ab22e-bd07-4232-abfe-c0617c0b9813/e07ab22e-bd07-4232-abfe-c0617c0b9813.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1490.900905] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0b042c46-74ac-489e-8ecc-ec4bbb769a6e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.910418] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1490.910418] env[62508]: value = "task-1775857" [ 1490.910418] env[62508]: _type = "Task" [ 1490.910418] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.923114] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775857, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.944365] env[62508]: DEBUG nova.compute.utils [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1490.949234] env[62508]: DEBUG nova.compute.manager [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1490.949411] env[62508]: DEBUG nova.network.neutron [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1491.053524] env[62508]: DEBUG nova.policy [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb9a92746e574883b54b3e5d5c0e5e14', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef2b6a7e3fe1491d8be03c1f6d906b2f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1491.091454] env[62508]: DEBUG oslo_vmware.api [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775856, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.119041] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e5ed8aa9-4df3-4d64-9b27-e368865add67 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1491.119340] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e5ed8aa9-4df3-4d64-9b27-e368865add67 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.119898] env[62508]: DEBUG nova.network.neutron [None req-e5ed8aa9-4df3-4d64-9b27-e368865add67 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1491.172996] env[62508]: DEBUG oslo_concurrency.lockutils [req-547e6cdb-36aa-4ddf-a24f-6ec540f963bc req-4dca8cc0-61eb-42ec-b4d3-d892ce86bb32 service nova] Releasing lock "refresh_cache-e07ab22e-bd07-4232-abfe-c0617c0b9813" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1491.220320] env[62508]: DEBUG oslo_concurrency.lockutils [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Acquiring lock "30e8b6ca-10fd-4e98-815d-1622f162b05c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.220495] env[62508]: DEBUG oslo_concurrency.lockutils [None 
req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Lock "30e8b6ca-10fd-4e98-815d-1622f162b05c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.220761] env[62508]: DEBUG oslo_concurrency.lockutils [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Acquiring lock "30e8b6ca-10fd-4e98-815d-1622f162b05c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.221258] env[62508]: DEBUG oslo_concurrency.lockutils [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Lock "30e8b6ca-10fd-4e98-815d-1622f162b05c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.221258] env[62508]: DEBUG oslo_concurrency.lockutils [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Lock "30e8b6ca-10fd-4e98-815d-1622f162b05c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.228510] env[62508]: INFO nova.compute.manager [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Terminating instance [ 1491.231428] env[62508]: DEBUG nova.compute.manager [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1491.231943] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1491.232898] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e2c975-94b5-410b-97cc-e8aa2de3e5a7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.245097] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1491.245503] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d5f0ebd-f109-4bec-828a-7786abde3841 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.256407] env[62508]: DEBUG oslo_vmware.api [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Waiting for the task: (returnval){ [ 1491.256407] env[62508]: value = "task-1775858" [ 1491.256407] env[62508]: _type = "Task" [ 1491.256407] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.267755] env[62508]: DEBUG oslo_vmware.api [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': task-1775858, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.376670] env[62508]: DEBUG oslo_concurrency.lockutils [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Acquiring lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1491.377012] env[62508]: DEBUG oslo_concurrency.lockutils [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Acquired lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.377012] env[62508]: DEBUG nova.network.neutron [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1491.430236] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775857, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.458960] env[62508]: DEBUG nova.compute.manager [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1491.463621] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d5369dca-72d8-4797-b009-b43822285a7a tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.025s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.470386] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.986s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.470386] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.475044] env[62508]: DEBUG oslo_concurrency.lockutils [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.401s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.475044] env[62508]: DEBUG oslo_concurrency.lockutils [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.476089] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.144s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.477580] env[62508]: INFO nova.compute.claims [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1491.508455] env[62508]: INFO nova.scheduler.client.report [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Deleted allocations for instance db74146d-abc3-4d48-be1b-6ad471794dbf [ 1491.514473] env[62508]: INFO nova.scheduler.client.report [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleted allocations for instance 38d294a9-2f51-438d-b942-a88e380a981f [ 1491.590151] env[62508]: DEBUG oslo_vmware.api [None 
req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775856, 'name': CreateSnapshot_Task, 'duration_secs': 0.564995} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.590518] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Created Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1491.592152] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd6a5b1-bbc6-4ab4-af79-8b2e43d549d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.772067] env[62508]: DEBUG oslo_vmware.api [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': task-1775858, 'name': PowerOffVM_Task, 'duration_secs': 0.454384} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.772705] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1491.772937] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1491.773215] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bbbe220f-0213-4ca3-b8b5-d02a2b4cec6f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.850825] env[62508]: DEBUG nova.network.neutron [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Successfully created port: c34d40a3-404e-4f74-8cdc-ba8732e8c103 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1491.923794] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775857, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.644262} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.925194] env[62508]: DEBUG nova.network.neutron [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1491.926613] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] e07ab22e-bd07-4232-abfe-c0617c0b9813/e07ab22e-bd07-4232-abfe-c0617c0b9813.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1491.926845] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1491.929453] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6eb4e62e-c25c-4060-a70c-e423d38e0cab {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.942622] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1491.942622] env[62508]: value = "task-1775860" [ 1491.942622] env[62508]: _type = "Task" [ 1491.942622] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.953558] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775860, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.967811] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1491.968075] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1491.968866] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Deleting the datastore file [datastore1] 30e8b6ca-10fd-4e98-815d-1622f162b05c {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1491.968866] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58e4dcef-0f66-452c-8dde-c359b6580d37 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.972135] env[62508]: INFO nova.virt.block_device [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Booting with volume aea26e09-c771-424f-b682-9ea0232aedf2 at /dev/sda [ 1491.979653] env[62508]: DEBUG oslo_vmware.api [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Waiting for the task: (returnval){ [ 1491.979653] env[62508]: value = "task-1775861" [ 1491.979653] env[62508]: _type = "Task" [ 1491.979653] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.997740] env[62508]: DEBUG oslo_vmware.api [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': task-1775861, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.029031] env[62508]: DEBUG nova.network.neutron [None req-e5ed8aa9-4df3-4d64-9b27-e368865add67 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating instance_info_cache with network_info: [{"id": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "address": "fa:16:3e:f8:bf:1b", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9e88907-91", "ovs_interfaceid": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1492.032077] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3ada7857-03a6-45e5-99a6-a5a18204bc67 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "db74146d-abc3-4d48-be1b-6ad471794dbf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.448s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.034793] env[62508]: DEBUG oslo_concurrency.lockutils [None req-160c5d72-dc88-4882-855e-a5c8d07a29ea tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "38d294a9-2f51-438d-b942-a88e380a981f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.291s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.035665] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8c98151a-7cd3-44bd-bde3-767818af9563 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.042343] env[62508]: DEBUG nova.compute.manager [req-1971fe4c-77ee-4253-99f0-e790c664ceb5 req-9986d72c-4afc-4e81-b2c9-feebcd72afcb service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Received event network-vif-plugged-d993966a-9d82-49a0-9e8a-f1835407ecad {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1492.042614] env[62508]: DEBUG oslo_concurrency.lockutils [req-1971fe4c-77ee-4253-99f0-e790c664ceb5 req-9986d72c-4afc-4e81-b2c9-feebcd72afcb service nova] Acquiring lock "6afa4e73-64b4-4b10-b598-433f0c22ecb3-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.043258] env[62508]: DEBUG oslo_concurrency.lockutils [req-1971fe4c-77ee-4253-99f0-e790c664ceb5 req-9986d72c-4afc-4e81-b2c9-feebcd72afcb service nova] Lock "6afa4e73-64b4-4b10-b598-433f0c22ecb3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.044038] env[62508]: DEBUG oslo_concurrency.lockutils [req-1971fe4c-77ee-4253-99f0-e790c664ceb5 req-9986d72c-4afc-4e81-b2c9-feebcd72afcb service nova] Lock "6afa4e73-64b4-4b10-b598-433f0c22ecb3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.044038] env[62508]: DEBUG nova.compute.manager [req-1971fe4c-77ee-4253-99f0-e790c664ceb5 req-9986d72c-4afc-4e81-b2c9-feebcd72afcb service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] No waiting events found dispatching network-vif-plugged-d993966a-9d82-49a0-9e8a-f1835407ecad {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1492.044038] env[62508]: WARNING nova.compute.manager [req-1971fe4c-77ee-4253-99f0-e790c664ceb5 req-9986d72c-4afc-4e81-b2c9-feebcd72afcb service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Received unexpected event network-vif-plugged-d993966a-9d82-49a0-9e8a-f1835407ecad for instance with vm_state building and task_state spawning. [ 1492.044489] env[62508]: DEBUG nova.compute.manager [req-1971fe4c-77ee-4253-99f0-e790c664ceb5 req-9986d72c-4afc-4e81-b2c9-feebcd72afcb service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Received event network-changed-d993966a-9d82-49a0-9e8a-f1835407ecad {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1492.044921] env[62508]: DEBUG nova.compute.manager [req-1971fe4c-77ee-4253-99f0-e790c664ceb5 req-9986d72c-4afc-4e81-b2c9-feebcd72afcb service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Refreshing instance network info cache due to event network-changed-d993966a-9d82-49a0-9e8a-f1835407ecad. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1492.045031] env[62508]: DEBUG oslo_concurrency.lockutils [req-1971fe4c-77ee-4253-99f0-e790c664ceb5 req-9986d72c-4afc-4e81-b2c9-feebcd72afcb service nova] Acquiring lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.056390] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be875bad-1be8-4029-8dda-bb3cfb52e142 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.097815] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-427aaf8f-0fc2-42ca-a78c-85948e4607b3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.116870] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Creating linked-clone VM from snapshot {{(pid=62508) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1492.118281] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f4dd622a-5686-4155-9777-5763b6107c7f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.125903] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8159ca8-cd5a-4820-82a7-41b841f06144 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.148284] env[62508]: DEBUG oslo_vmware.api [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1492.148284] env[62508]: value = "task-1775862" [ 1492.148284] env[62508]: _type = "Task" [ 1492.148284] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.171916] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a74826d-69c7-4fb0-bae9-dee8d75fbb21 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.175337] env[62508]: DEBUG oslo_vmware.api [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775862, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.180528] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3352a589-ebc4-4282-8b3d-fa211ff6bcd2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.206742] env[62508]: DEBUG nova.virt.block_device [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Updating existing volume attachment record: 44249f2a-ec68-4b08-ac16-febf890f177d {{(pid=62508) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1492.210744] env[62508]: DEBUG nova.network.neutron [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Updating instance_info_cache with network_info: [{"id": "d993966a-9d82-49a0-9e8a-f1835407ecad", "address": "fa:16:3e:62:45:86", "network": {"id": "a60137fc-6bcc-47ca-a062-ff0c72ec6801", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1168729734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2b18b2f1d6d439382b9e0fc89b2a3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1712475b-e1be-49e0-9a18-febd305c90ad", "external-id": "nsx-vlan-transportzone-531", "segmentation_id": 531, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd993966a-9d", "ovs_interfaceid": "d993966a-9d82-49a0-9e8a-f1835407ecad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1492.459235] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775860, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.22596} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.460678] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1492.460678] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d511aab6-3a9a-4587-bac9-b6ab6cf5d41a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.490047] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] e07ab22e-bd07-4232-abfe-c0617c0b9813/e07ab22e-bd07-4232-abfe-c0617c0b9813.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1492.494175] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b4ddcaa-6081-4854-8267-ad64cc567e19 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.523812] env[62508]: DEBUG oslo_vmware.api [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Task: {'id': task-1775861, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.512207} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.525444] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1492.525657] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1492.525839] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1492.526029] env[62508]: INFO nova.compute.manager [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Took 1.29 seconds to destroy the instance on the hypervisor. 
[ 1492.526645] env[62508]: DEBUG oslo.service.loopingcall [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1492.527057] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1492.527057] env[62508]: value = "task-1775863" [ 1492.527057] env[62508]: _type = "Task" [ 1492.527057] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.527268] env[62508]: DEBUG nova.compute.manager [-] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1492.527268] env[62508]: DEBUG nova.network.neutron [-] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1492.534528] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e5ed8aa9-4df3-4d64-9b27-e368865add67 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.539622] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775863, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.540121] env[62508]: DEBUG nova.compute.manager [None req-e5ed8aa9-4df3-4d64-9b27-e368865add67 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1492.543644] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5079430-1dc1-4262-abb8-497498c413d5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.664270] env[62508]: DEBUG oslo_vmware.api [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775862, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.713675] env[62508]: DEBUG oslo_concurrency.lockutils [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Releasing lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.713994] env[62508]: DEBUG nova.compute.manager [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Instance network_info: |[{"id": "d993966a-9d82-49a0-9e8a-f1835407ecad", "address": "fa:16:3e:62:45:86", "network": {"id": "a60137fc-6bcc-47ca-a062-ff0c72ec6801", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1168729734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2b18b2f1d6d439382b9e0fc89b2a3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1712475b-e1be-49e0-9a18-febd305c90ad", "external-id": "nsx-vlan-transportzone-531", "segmentation_id": 531, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd993966a-9d", "ovs_interfaceid": "d993966a-9d82-49a0-9e8a-f1835407ecad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1492.714349] env[62508]: DEBUG oslo_concurrency.lockutils [req-1971fe4c-77ee-4253-99f0-e790c664ceb5 req-9986d72c-4afc-4e81-b2c9-feebcd72afcb service nova] Acquired lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.714533] env[62508]: DEBUG nova.network.neutron [req-1971fe4c-77ee-4253-99f0-e790c664ceb5 req-9986d72c-4afc-4e81-b2c9-feebcd72afcb service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Refreshing network info cache for port d993966a-9d82-49a0-9e8a-f1835407ecad {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1492.715795] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:45:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1712475b-e1be-49e0-9a18-febd305c90ad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd993966a-9d82-49a0-9e8a-f1835407ecad', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1492.725269] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 
tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Creating folder: Project (b2b18b2f1d6d439382b9e0fc89b2a3aa). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1492.731381] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2694e581-0c00-4ceb-87ff-3416f9033044 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.745321] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Created folder: Project (b2b18b2f1d6d439382b9e0fc89b2a3aa) in parent group-v368536. [ 1492.745535] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Creating folder: Instances. Parent ref: group-v368685. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1492.745852] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3d6d6b34-21b6-44e9-a52b-94180be98e1b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.757787] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Created folder: Instances in parent group-v368685. [ 1492.759244] env[62508]: DEBUG oslo.service.loopingcall [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1492.759244] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1492.759244] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1cb177a-121b-4388-b341-af6c54d88d6e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.791214] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1492.791214] env[62508]: value = "task-1775866" [ 1492.791214] env[62508]: _type = "Task" [ 1492.791214] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.803636] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775866, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.045010] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775863, 'name': ReconfigVM_Task, 'duration_secs': 0.420065} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.046732] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Reconfigured VM instance instance-00000032 to attach disk [datastore1] e07ab22e-bd07-4232-abfe-c0617c0b9813/e07ab22e-bd07-4232-abfe-c0617c0b9813.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1493.047262] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=62508) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 1493.048957] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff94c35b-5fd3-4ef3-a90a-7822f6f7988f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.052517] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-93a7d00e-5818-48e2-8ab2-1ffc85ed3891 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.062695] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1493.062695] env[62508]: value = "task-1775867" [ 1493.062695] env[62508]: _type = "Task" [ 1493.062695] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.063898] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50cd53ea-b1ff-4e1b-b23d-c64c090912f0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.111186] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a7f4355-3f05-49b2-a7a2-fef6ad71fee5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.126175] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c9bf66-b919-4b6d-8cf2-cf56f929fa48 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.139526] env[62508]: DEBUG nova.compute.provider_tree [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1493.161317] env[62508]: DEBUG oslo_vmware.api [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775862, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.181247] env[62508]: DEBUG nova.compute.manager [req-692b76f8-959b-4bb7-9941-f75aa22e587e req-6a727d04-216e-4e2f-bd40-b71d5a9bf83c service nova] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Received event network-vif-deleted-f3739879-1b13-47e9-bec2-609738a10c75 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1493.181466] env[62508]: INFO nova.compute.manager [req-692b76f8-959b-4bb7-9941-f75aa22e587e req-6a727d04-216e-4e2f-bd40-b71d5a9bf83c service nova] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Neutron deleted interface f3739879-1b13-47e9-bec2-609738a10c75; detaching it from the instance and deleting it from the info cache [ 1493.181628] env[62508]: DEBUG nova.network.neutron [req-692b76f8-959b-4bb7-9941-f75aa22e587e req-6a727d04-216e-4e2f-bd40-b71d5a9bf83c service nova] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.267634] env[62508]: DEBUG nova.network.neutron [req-1971fe4c-77ee-4253-99f0-e790c664ceb5 req-9986d72c-4afc-4e81-b2c9-feebcd72afcb service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Updated VIF entry in instance network info cache for port d993966a-9d82-49a0-9e8a-f1835407ecad. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1493.268017] env[62508]: DEBUG nova.network.neutron [req-1971fe4c-77ee-4253-99f0-e790c664ceb5 req-9986d72c-4afc-4e81-b2c9-feebcd72afcb service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Updating instance_info_cache with network_info: [{"id": "d993966a-9d82-49a0-9e8a-f1835407ecad", "address": "fa:16:3e:62:45:86", "network": {"id": "a60137fc-6bcc-47ca-a062-ff0c72ec6801", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1168729734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2b18b2f1d6d439382b9e0fc89b2a3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1712475b-e1be-49e0-9a18-febd305c90ad", "external-id": "nsx-vlan-transportzone-531", "segmentation_id": 531, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd993966a-9d", "ovs_interfaceid": "d993966a-9d82-49a0-9e8a-f1835407ecad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.306957] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775866, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.562194] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8972cb4c-17d1-455a-a9f3-942d55dc5177 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.574843] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775867, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.073265} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.576779] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=62508) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 1493.577094] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e5ed8aa9-4df3-4d64-9b27-e368865add67 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Doing hard reboot of VM {{(pid=62508) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1493.577887] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e939a27c-bf64-4d0f-82b5-b4714786a84e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.581125] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-e1ce4b7d-c3ec-47d4-a323-dff72cff8197 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.606142] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] e07ab22e-bd07-4232-abfe-c0617c0b9813/ephemeral_0.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1493.608485] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4eb37bf-7011-49f2-9f66-ea96f2abcc5e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.627229] env[62508]: DEBUG oslo_vmware.api [None req-e5ed8aa9-4df3-4d64-9b27-e368865add67 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1493.627229] env[62508]: value = "task-1775868" [ 1493.627229] env[62508]: _type = "Task" [ 1493.627229] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.628153] env[62508]: DEBUG nova.network.neutron [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Successfully updated port: c34d40a3-404e-4f74-8cdc-ba8732e8c103 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1493.641414] env[62508]: DEBUG oslo_vmware.api [None req-e5ed8aa9-4df3-4d64-9b27-e368865add67 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1775868, 'name': ResetVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.643979] env[62508]: DEBUG nova.scheduler.client.report [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1493.651529] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1493.651529] env[62508]: value = "task-1775869" [ 1493.651529] env[62508]: _type = "Task" [ 1493.651529] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.654851] env[62508]: DEBUG nova.network.neutron [-] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.667109] env[62508]: DEBUG oslo_vmware.api [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775862, 'name': CloneVM_Task} progress is 95%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.670488] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775869, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.686008] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e9679ce-5a9a-4882-8bc5-b66f0beb2f58 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.696342] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667347fe-e306-49a6-9986-0fe004ab70c4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.739217] env[62508]: DEBUG nova.compute.manager [req-692b76f8-959b-4bb7-9941-f75aa22e587e req-6a727d04-216e-4e2f-bd40-b71d5a9bf83c service nova] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Detach interface failed, port_id=f3739879-1b13-47e9-bec2-609738a10c75, reason: Instance 30e8b6ca-10fd-4e98-815d-1622f162b05c could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1493.771534] env[62508]: DEBUG oslo_concurrency.lockutils [req-1971fe4c-77ee-4253-99f0-e790c664ceb5 req-9986d72c-4afc-4e81-b2c9-feebcd72afcb service nova] Releasing lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.809013] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775866, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.113710] env[62508]: DEBUG nova.compute.manager [req-0edede5e-9a2a-4750-8553-00ff9a88df45 req-3e381a79-1a7a-4fe0-949b-ed9ba1135e28 service nova] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Received event network-vif-plugged-c34d40a3-404e-4f74-8cdc-ba8732e8c103 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1494.114040] env[62508]: DEBUG oslo_concurrency.lockutils [req-0edede5e-9a2a-4750-8553-00ff9a88df45 req-3e381a79-1a7a-4fe0-949b-ed9ba1135e28 service nova] Acquiring lock "f307d4d5-e877-4d0a-951c-779c1d2e573b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.114337] env[62508]: DEBUG oslo_concurrency.lockutils [req-0edede5e-9a2a-4750-8553-00ff9a88df45 req-3e381a79-1a7a-4fe0-949b-ed9ba1135e28 service nova] Lock "f307d4d5-e877-4d0a-951c-779c1d2e573b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.114410] env[62508]: DEBUG oslo_concurrency.lockutils [req-0edede5e-9a2a-4750-8553-00ff9a88df45 req-3e381a79-1a7a-4fe0-949b-ed9ba1135e28 service nova] Lock "f307d4d5-e877-4d0a-951c-779c1d2e573b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.114589] env[62508]: DEBUG nova.compute.manager [req-0edede5e-9a2a-4750-8553-00ff9a88df45 req-3e381a79-1a7a-4fe0-949b-ed9ba1135e28 service nova] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] No waiting events found dispatching network-vif-plugged-c34d40a3-404e-4f74-8cdc-ba8732e8c103 {{(pid=62508) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1494.115368] env[62508]: WARNING nova.compute.manager [req-0edede5e-9a2a-4750-8553-00ff9a88df45 req-3e381a79-1a7a-4fe0-949b-ed9ba1135e28 service nova] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Received unexpected event network-vif-plugged-c34d40a3-404e-4f74-8cdc-ba8732e8c103 for instance with vm_state building and task_state spawning. [ 1494.115368] env[62508]: DEBUG nova.compute.manager [req-0edede5e-9a2a-4750-8553-00ff9a88df45 req-3e381a79-1a7a-4fe0-949b-ed9ba1135e28 service nova] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Received event network-changed-c34d40a3-404e-4f74-8cdc-ba8732e8c103 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1494.115368] env[62508]: DEBUG nova.compute.manager [req-0edede5e-9a2a-4750-8553-00ff9a88df45 req-3e381a79-1a7a-4fe0-949b-ed9ba1135e28 service nova] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Refreshing instance network info cache due to event network-changed-c34d40a3-404e-4f74-8cdc-ba8732e8c103. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1494.115368] env[62508]: DEBUG oslo_concurrency.lockutils [req-0edede5e-9a2a-4750-8553-00ff9a88df45 req-3e381a79-1a7a-4fe0-949b-ed9ba1135e28 service nova] Acquiring lock "refresh_cache-f307d4d5-e877-4d0a-951c-779c1d2e573b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.115886] env[62508]: DEBUG oslo_concurrency.lockutils [req-0edede5e-9a2a-4750-8553-00ff9a88df45 req-3e381a79-1a7a-4fe0-949b-ed9ba1135e28 service nova] Acquired lock "refresh_cache-f307d4d5-e877-4d0a-951c-779c1d2e573b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.115886] env[62508]: DEBUG nova.network.neutron [req-0edede5e-9a2a-4750-8553-00ff9a88df45 req-3e381a79-1a7a-4fe0-949b-ed9ba1135e28 service nova] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Refreshing network info cache for port c34d40a3-404e-4f74-8cdc-ba8732e8c103 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1494.137376] env[62508]: DEBUG oslo_concurrency.lockutils [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Acquiring lock "refresh_cache-f307d4d5-e877-4d0a-951c-779c1d2e573b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.146033] env[62508]: DEBUG oslo_vmware.api [None req-e5ed8aa9-4df3-4d64-9b27-e368865add67 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1775868, 'name': ResetVM_Task, 'duration_secs': 0.102585} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.146033] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e5ed8aa9-4df3-4d64-9b27-e368865add67 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Did hard reboot of VM {{(pid=62508) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1494.146033] env[62508]: DEBUG nova.compute.manager [None req-e5ed8aa9-4df3-4d64-9b27-e368865add67 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1494.146915] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7cf4ee-b73b-41d6-a261-d045b8dca460 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.150294] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.675s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.150832] env[62508]: DEBUG nova.compute.manager [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1494.153972] env[62508]: DEBUG oslo_concurrency.lockutils [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.374s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.154226] env[62508]: DEBUG oslo_concurrency.lockutils [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.156386] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 25.934s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.164137] env[62508]: INFO nova.compute.manager [-] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Took 1.64 seconds to deallocate network for instance. 
[ 1494.199027] env[62508]: DEBUG oslo_vmware.api [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775862, 'name': CloneVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.207316] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775869, 'name': ReconfigVM_Task, 'duration_secs': 0.481468} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.212467] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Reconfigured VM instance instance-00000032 to attach disk [datastore1] e07ab22e-bd07-4232-abfe-c0617c0b9813/ephemeral_0.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1494.212626] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9089bc31-db7a-4e62-9eb7-d82ed789b674 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.222036] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1494.222036] env[62508]: value = "task-1775870" [ 1494.222036] env[62508]: _type = "Task" [ 1494.222036] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.225419] env[62508]: INFO nova.scheduler.client.report [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Deleted allocations for instance 7d23d8f0-d7a9-4236-ad28-208e77b72138 [ 1494.242152] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775870, 'name': Rename_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.311113] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775866, 'name': CreateVM_Task, 'duration_secs': 1.307902} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.311296] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1494.311998] env[62508]: DEBUG oslo_concurrency.lockutils [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.312179] env[62508]: DEBUG oslo_concurrency.lockutils [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.312492] env[62508]: DEBUG oslo_concurrency.lockutils [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1494.312744] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91347167-e0a9-4534-8d7e-67d00f746804 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.317971] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Waiting for the task: (returnval){ [ 1494.317971] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52517151-3ee8-c94e-b976-dfce14c0a8d8" [ 1494.317971] env[62508]: _type = "Task" [ 1494.317971] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.326767] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52517151-3ee8-c94e-b976-dfce14c0a8d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.337088] env[62508]: DEBUG nova.compute.manager [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1494.337620] env[62508]: DEBUG nova.virt.hardware [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1494.337829] env[62508]: DEBUG nova.virt.hardware [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1494.337987] env[62508]: DEBUG nova.virt.hardware [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1494.338190] env[62508]: DEBUG nova.virt.hardware [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1494.338377] env[62508]: DEBUG nova.virt.hardware [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1494.338489] env[62508]: DEBUG nova.virt.hardware [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1494.338673] env[62508]: DEBUG nova.virt.hardware [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1494.338829] env[62508]: DEBUG nova.virt.hardware [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1494.338995] env[62508]: DEBUG nova.virt.hardware [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 
tempest-ServersTestBootFromVolume-1626959145-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1494.339189] env[62508]: DEBUG nova.virt.hardware [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1494.339388] env[62508]: DEBUG nova.virt.hardware [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1494.340542] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c23e0ea1-b372-4e59-aed0-41db3de6878d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.348330] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0005fb2b-eb5d-42e7-9a33-713a573c8be8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.522610] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.522811] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.660300] env[62508]: DEBUG nova.network.neutron [req-0edede5e-9a2a-4750-8553-00ff9a88df45 req-3e381a79-1a7a-4fe0-949b-ed9ba1135e28 service nova] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1494.663520] env[62508]: DEBUG nova.compute.utils [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1494.667187] env[62508]: INFO nova.compute.claims [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1494.674186] env[62508]: DEBUG nova.compute.manager [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1494.674368] env[62508]: DEBUG nova.network.neutron [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1494.684259] env[62508]: DEBUG oslo_vmware.api [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775862, 'name': CloneVM_Task, 'duration_secs': 2.07498} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.685042] env[62508]: INFO nova.virt.vmwareapi.vmops [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Created linked-clone VM from snapshot [ 1494.686227] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1464783-aff6-4a4c-996e-1bf43eeb62b2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.691342] env[62508]: DEBUG oslo_concurrency.lockutils [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.696725] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Uploading image 3b90f393-c75c-47dc-8dbc-3f3647e17ab7 {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1494.699645] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e5ed8aa9-4df3-4d64-9b27-e368865add67 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "95a289ac-3178-45ea-80d2-905b9af54f3c" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.107s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.727850] env[62508]: DEBUG oslo_vmware.rw_handles [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1494.727850] env[62508]: value = "vm-368684" [ 1494.727850] env[62508]: _type = "VirtualMachine" [ 1494.727850] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1494.728266] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e7cfd927-3e85-40df-a324-6fab0180c26c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.736600] env[62508]: DEBUG nova.policy [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '596ff35abb3949e9b3d3d9b80e6eae69', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '15a9d6b8eb4e44a7a3d7fa4abe0cd5bb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1494.740500] env[62508]: DEBUG oslo_concurrency.lockutils [None req-80dc2d85-5d3f-4678-a400-e707d2f869e3 tempest-VolumesAssistedSnapshotsTest-65146600 tempest-VolumesAssistedSnapshotsTest-65146600-project-member] Lock "7d23d8f0-d7a9-4236-ad28-208e77b72138" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.322s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.748401] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775870, 'name': Rename_Task, 'duration_secs': 0.31168} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.749991] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1494.750375] env[62508]: DEBUG oslo_vmware.rw_handles [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lease: (returnval){ [ 1494.750375] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52866548-0458-733b-24ab-bbe47518fefe" [ 1494.750375] env[62508]: _type = "HttpNfcLease" [ 1494.750375] env[62508]: } obtained for exporting VM: (result){ [ 1494.750375] env[62508]: value = "vm-368684" [ 1494.750375] env[62508]: _type = "VirtualMachine" [ 1494.750375] env[62508]: }. 
{{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1494.750613] env[62508]: DEBUG oslo_vmware.api [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the lease: (returnval){ [ 1494.750613] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52866548-0458-733b-24ab-bbe47518fefe" [ 1494.750613] env[62508]: _type = "HttpNfcLease" [ 1494.750613] env[62508]: } to be ready. {{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1494.750752] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c84a65f8-bb3e-47de-88fd-86f623a5ea22 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.762162] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1494.762162] env[62508]: value = "task-1775872" [ 1494.762162] env[62508]: _type = "Task" [ 1494.762162] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.763730] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1494.763730] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52866548-0458-733b-24ab-bbe47518fefe" [ 1494.763730] env[62508]: _type = "HttpNfcLease" [ 1494.763730] env[62508]: } is ready. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1494.767100] env[62508]: DEBUG oslo_vmware.rw_handles [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1494.767100] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52866548-0458-733b-24ab-bbe47518fefe" [ 1494.767100] env[62508]: _type = "HttpNfcLease" [ 1494.767100] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1494.768072] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3af483a-76f3-4085-8e5c-07a2e8718195 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.776910] env[62508]: DEBUG oslo_vmware.rw_handles [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f5752c-ada9-6ece-ff29-58016e2fb79e/disk-0.vmdk from lease info. {{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1494.777112] env[62508]: DEBUG oslo_vmware.rw_handles [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f5752c-ada9-6ece-ff29-58016e2fb79e/disk-0.vmdk for reading. 
{{(pid=62508) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1494.781240] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775872, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.842189] env[62508]: DEBUG nova.network.neutron [req-0edede5e-9a2a-4750-8553-00ff9a88df45 req-3e381a79-1a7a-4fe0-949b-ed9ba1135e28 service nova] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1494.854667] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52517151-3ee8-c94e-b976-dfce14c0a8d8, 'name': SearchDatastore_Task, 'duration_secs': 0.01208} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.855545] env[62508]: DEBUG oslo_concurrency.lockutils [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1494.855782] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1494.856018] env[62508]: DEBUG oslo_concurrency.lockutils [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.856196] env[62508]: DEBUG oslo_concurrency.lockutils [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.856377] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1494.856918] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-05d0a30b-3246-445b-abab-3f7ea9d67291 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.866198] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1494.866389] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1494.867115] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c04ca4a-0ee0-4a21-8895-c03d639e1a63 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.873432] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Waiting for the task: (returnval){ [ 1494.873432] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52585967-4689-0b6f-c2c9-30dc3e8ca80b" [ 1494.873432] env[62508]: _type = "Task" [ 1494.873432] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.881461] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52585967-4689-0b6f-c2c9-30dc3e8ca80b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.893811] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8c8f3d07-0d43-4d45-a6a6-215b2a4a6a65 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.072461] env[62508]: DEBUG nova.network.neutron [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Successfully created port: 8ba6ad85-c68d-4f7c-87dd-acd4fe19986b {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1495.175039] env[62508]: DEBUG nova.compute.manager [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1495.182143] env[62508]: INFO nova.compute.resource_tracker [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Updating resource usage from migration 4fdfcd20-c840-423e-a6be-dfa3f61e5d01 [ 1495.280749] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775872, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.348254] env[62508]: DEBUG oslo_concurrency.lockutils [req-0edede5e-9a2a-4750-8553-00ff9a88df45 req-3e381a79-1a7a-4fe0-949b-ed9ba1135e28 service nova] Releasing lock "refresh_cache-f307d4d5-e877-4d0a-951c-779c1d2e573b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.349338] env[62508]: DEBUG oslo_concurrency.lockutils [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Acquired lock "refresh_cache-f307d4d5-e877-4d0a-951c-779c1d2e573b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.349338] env[62508]: DEBUG nova.network.neutron [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1495.395142] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52585967-4689-0b6f-c2c9-30dc3e8ca80b, 'name': SearchDatastore_Task, 'duration_secs': 0.014387} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.396248] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36cd9f42-a9f2-4e76-b449-73db617e3a97 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.404208] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Waiting for the task: (returnval){ [ 1495.404208] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5247864a-199d-87a5-fcce-3802851fada1" [ 1495.404208] env[62508]: _type = "Task" [ 1495.404208] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.422575] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5247864a-199d-87a5-fcce-3802851fada1, 'name': SearchDatastore_Task, 'duration_secs': 0.01242} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.424074] env[62508]: DEBUG oslo_concurrency.lockutils [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.425489] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 6afa4e73-64b4-4b10-b598-433f0c22ecb3/6afa4e73-64b4-4b10-b598-433f0c22ecb3.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1495.425882] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3fd1f17c-88d2-4ec7-8647-7d20b30b05c7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.437144] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Waiting for the task: (returnval){ [ 1495.437144] env[62508]: value = "task-1775873" [ 1495.437144] env[62508]: _type = "Task" [ 1495.437144] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.453787] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1775873, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.775882] env[62508]: DEBUG oslo_vmware.api [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1775872, 'name': PowerOnVM_Task, 'duration_secs': 0.613094} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.776640] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1495.776640] env[62508]: INFO nova.compute.manager [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Took 8.11 seconds to spawn the instance on the hypervisor. [ 1495.776640] env[62508]: DEBUG nova.compute.manager [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1495.778221] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c71f091-b9f0-4a9d-a026-2efe7d5da5e7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.819998] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504c962c-4372-4cf0-aaa7-649ebe39af3a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.834594] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-431d7239-c8b4-4ec6-b38e-95927cc8c2cd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.875816] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb58f03-7109-4886-b2e7-02bfa2edd3ef {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.889830] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4519cf-a77f-47cb-bc5d-87f1d70e1b25 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.906718] env[62508]: DEBUG nova.compute.provider_tree [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1495.915673] env[62508]: DEBUG nova.network.neutron [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1495.960248] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1775873, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.192278] env[62508]: DEBUG nova.compute.manager [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1496.219339] env[62508]: DEBUG nova.network.neutron [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Updating instance_info_cache with network_info: [{"id": "c34d40a3-404e-4f74-8cdc-ba8732e8c103", "address": "fa:16:3e:65:8e:bb", "network": {"id": "5abc1d3a-649b-4566-af47-32dd35b19f62", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1555036678-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef2b6a7e3fe1491d8be03c1f6d906b2f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc34d40a3-40", "ovs_interfaceid": "c34d40a3-404e-4f74-8cdc-ba8732e8c103", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.236835] env[62508]: DEBUG nova.virt.hardware [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1496.237384] env[62508]: DEBUG nova.virt.hardware [None 
req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1496.240073] env[62508]: DEBUG nova.virt.hardware [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1496.240298] env[62508]: DEBUG nova.virt.hardware [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1496.240451] env[62508]: DEBUG nova.virt.hardware [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1496.240604] env[62508]: DEBUG nova.virt.hardware [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1496.240825] env[62508]: DEBUG nova.virt.hardware [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1496.240983] env[62508]: DEBUG nova.virt.hardware [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1496.241173] env[62508]: DEBUG nova.virt.hardware [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1496.241345] env[62508]: DEBUG nova.virt.hardware [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1496.241606] env[62508]: DEBUG nova.virt.hardware [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1496.242552] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b31fcd-ac94-4993-92d9-f898263a73b2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.258490] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5f616e78-bb24-4e53-968d-acffcba7e2f7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.328122] env[62508]: INFO nova.compute.manager [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Took 45.98 seconds to build instance. [ 1496.412046] env[62508]: DEBUG nova.scheduler.client.report [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1496.449380] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1775873, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597333} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.449826] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 6afa4e73-64b4-4b10-b598-433f0c22ecb3/6afa4e73-64b4-4b10-b598-433f0c22ecb3.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1496.450077] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1496.450329] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-92da89e7-5d00-4c03-9275-fc52795417cd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.462017] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Waiting for the task: (returnval){ [ 1496.462017] env[62508]: value = "task-1775874" [ 1496.462017] env[62508]: _type = "Task" [ 1496.462017] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.474415] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1775874, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.706084] env[62508]: DEBUG nova.compute.manager [req-5ed3810e-bbc7-40d3-af99-745fb2a74cbd req-64d7ebc8-d73f-469e-a3b1-8c2860d0882e service nova] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Received event network-vif-plugged-8ba6ad85-c68d-4f7c-87dd-acd4fe19986b {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1496.706340] env[62508]: DEBUG oslo_concurrency.lockutils [req-5ed3810e-bbc7-40d3-af99-745fb2a74cbd req-64d7ebc8-d73f-469e-a3b1-8c2860d0882e service nova] Acquiring lock "e156aef5-bb56-4c17-9e7e-9419b672c9cf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.706547] env[62508]: DEBUG oslo_concurrency.lockutils [req-5ed3810e-bbc7-40d3-af99-745fb2a74cbd req-64d7ebc8-d73f-469e-a3b1-8c2860d0882e service nova] Lock "e156aef5-bb56-4c17-9e7e-9419b672c9cf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.706778] env[62508]: DEBUG oslo_concurrency.lockutils [req-5ed3810e-bbc7-40d3-af99-745fb2a74cbd req-64d7ebc8-d73f-469e-a3b1-8c2860d0882e service nova] Lock "e156aef5-bb56-4c17-9e7e-9419b672c9cf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1496.707039] env[62508]: DEBUG nova.compute.manager [req-5ed3810e-bbc7-40d3-af99-745fb2a74cbd req-64d7ebc8-d73f-469e-a3b1-8c2860d0882e service nova] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] No waiting events found dispatching network-vif-plugged-8ba6ad85-c68d-4f7c-87dd-acd4fe19986b {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1496.707466] env[62508]: WARNING nova.compute.manager [req-5ed3810e-bbc7-40d3-af99-745fb2a74cbd req-64d7ebc8-d73f-469e-a3b1-8c2860d0882e service nova] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Received unexpected event network-vif-plugged-8ba6ad85-c68d-4f7c-87dd-acd4fe19986b for instance with vm_state building and task_state spawning. 
[ 1496.723353] env[62508]: DEBUG oslo_concurrency.lockutils [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Releasing lock "refresh_cache-f307d4d5-e877-4d0a-951c-779c1d2e573b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1496.723534] env[62508]: DEBUG nova.compute.manager [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Instance network_info: |[{"id": "c34d40a3-404e-4f74-8cdc-ba8732e8c103", "address": "fa:16:3e:65:8e:bb", "network": {"id": "5abc1d3a-649b-4566-af47-32dd35b19f62", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1555036678-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef2b6a7e3fe1491d8be03c1f6d906b2f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc34d40a3-40", "ovs_interfaceid": "c34d40a3-404e-4f74-8cdc-ba8732e8c103", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1496.724244] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:8e:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0248a27a-1d7f-4195-987b-06bfc8467347', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c34d40a3-404e-4f74-8cdc-ba8732e8c103', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1496.732690] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Creating folder: Project (ef2b6a7e3fe1491d8be03c1f6d906b2f). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1496.733319] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-243a8f0a-bb3d-4d75-be40-9a4f001ccf7c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.750147] env[62508]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 1496.750341] env[62508]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62508) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1496.750704] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Folder already exists: Project (ef2b6a7e3fe1491d8be03c1f6d906b2f). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1496.750915] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Creating folder: Instances. Parent ref: group-v368643. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1496.751245] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cc89e3b2-9ce1-4f84-b594-ed865e741821 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.765678] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Created folder: Instances in parent group-v368643. [ 1496.765939] env[62508]: DEBUG oslo.service.loopingcall [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1496.766162] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1496.766378] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b6b0563e-057e-49f8-8065-30559a01d3b8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.790289] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1496.790289] env[62508]: value = "task-1775877" [ 1496.790289] env[62508]: _type = "Task" [ 1496.790289] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.799299] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775877, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.807983] env[62508]: DEBUG oslo_concurrency.lockutils [None req-36532487-6767-4b76-833c-031388f6ef3e tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lock "e07ab22e-bd07-4232-abfe-c0617c0b9813" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.859s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1496.837370] env[62508]: DEBUG nova.network.neutron [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Successfully updated port: 8ba6ad85-c68d-4f7c-87dd-acd4fe19986b {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1496.917353] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.761s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1496.917566] env[62508]: INFO nova.compute.manager [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Migrating [ 1496.925088] env[62508]: DEBUG oslo_concurrency.lockutils [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.109s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.926563] env[62508]: INFO nova.compute.claims [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1496.973189] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1775874, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091576} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.973489] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1496.974328] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfdee720-973f-4bfb-886d-cfd15f038349 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.001388] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 6afa4e73-64b4-4b10-b598-433f0c22ecb3/6afa4e73-64b4-4b10-b598-433f0c22ecb3.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1497.001918] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4787aa46-300a-41b2-b822-f2ed23711dcb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.027198] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Waiting for the task: (returnval){ [ 1497.027198] env[62508]: value = "task-1775878" [ 1497.027198] env[62508]: _type = "Task" [ 1497.027198] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.038315] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1775878, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.303188] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775877, 'name': CreateVM_Task, 'duration_secs': 0.471687} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.303474] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1497.304243] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'device_type': None, 'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368657', 'volume_id': 'aea26e09-c771-424f-b682-9ea0232aedf2', 'name': 'volume-aea26e09-c771-424f-b682-9ea0232aedf2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f307d4d5-e877-4d0a-951c-779c1d2e573b', 'attached_at': '', 'detached_at': '', 'volume_id': 'aea26e09-c771-424f-b682-9ea0232aedf2', 'serial': 'aea26e09-c771-424f-b682-9ea0232aedf2'}, 'disk_bus': None, 'guest_format': None, 'attachment_id': '44249f2a-ec68-4b08-ac16-febf890f177d', 'mount_device': '/dev/sda', 'volume_type': None}], 'swap': None} {{(pid=62508) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1497.304466] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Root volume attach. Driver type: vmdk {{(pid=62508) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1497.305308] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b509f130-cca3-4117-89d4-bfb87c9b71a8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.310498] env[62508]: DEBUG nova.compute.manager [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1497.317903] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3fc3845-9f64-4e3e-a0e6-68ed76fc0153 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.326509] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed855eb-47ab-40e6-a498-09f537689574 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.333394] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-ee4822bc-bebf-437a-bfb6-74d85df6cdcc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.341602] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "refresh_cache-e156aef5-bb56-4c17-9e7e-9419b672c9cf" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1497.341753] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquired lock "refresh_cache-e156aef5-bb56-4c17-9e7e-9419b672c9cf" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1497.341899] env[62508]: DEBUG nova.network.neutron [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1497.343603] env[62508]: DEBUG oslo_vmware.api [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Waiting for the task: (returnval){ [ 1497.343603] env[62508]: value = "task-1775879" [ 1497.343603] env[62508]: _type = "Task" [ 1497.343603] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.354298] env[62508]: DEBUG oslo_vmware.api [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Task: {'id': task-1775879, 'name': RelocateVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.441138] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "refresh_cache-e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1497.441382] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquired lock "refresh_cache-e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1497.441613] env[62508]: DEBUG nova.network.neutron [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1497.538088] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1775878, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.837245] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.856505] env[62508]: DEBUG oslo_vmware.api [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Task: {'id': task-1775879, 'name': RelocateVM_Task, 'duration_secs': 0.032254} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.856853] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Volume attach. 
Driver type: vmdk {{(pid=62508) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1497.856932] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368657', 'volume_id': 'aea26e09-c771-424f-b682-9ea0232aedf2', 'name': 'volume-aea26e09-c771-424f-b682-9ea0232aedf2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f307d4d5-e877-4d0a-951c-779c1d2e573b', 'attached_at': '', 'detached_at': '', 'volume_id': 'aea26e09-c771-424f-b682-9ea0232aedf2', 'serial': 'aea26e09-c771-424f-b682-9ea0232aedf2'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1497.857743] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db1e7940-64a3-4145-a514-575129d9d711 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.883988] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91469f4a-4a0c-43bc-9710-90ca8c1446af {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.912037] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] volume-aea26e09-c771-424f-b682-9ea0232aedf2/volume-aea26e09-c771-424f-b682-9ea0232aedf2.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1497.912375] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae20f3c6-a596-4cd2-afb5-51d390c1baa9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.938426] env[62508]: DEBUG oslo_vmware.api [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Waiting for the task: (returnval){ [ 1497.938426] env[62508]: value = "task-1775880" [ 1497.938426] env[62508]: _type = "Task" [ 1497.938426] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.950794] env[62508]: DEBUG oslo_vmware.api [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Task: {'id': task-1775880, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.957877] env[62508]: DEBUG nova.network.neutron [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1498.038147] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1775878, 'name': ReconfigVM_Task, 'duration_secs': 0.68416} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.038662] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 6afa4e73-64b4-4b10-b598-433f0c22ecb3/6afa4e73-64b4-4b10-b598-433f0c22ecb3.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1498.040032] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f821993-56ce-499e-ae2f-c10afdb3bc71 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.049407] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Waiting for the task: (returnval){ [ 1498.049407] env[62508]: value = "task-1775881" [ 1498.049407] env[62508]: _type = "Task" [ 1498.049407] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.059373] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1775881, 'name': Rename_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.301196] env[62508]: DEBUG nova.network.neutron [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Updating instance_info_cache with network_info: [{"id": "cafd5648-99e8-4c28-92bb-439b1d656b15", "address": "fa:16:3e:79:5e:f6", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcafd5648-99", "ovs_interfaceid": "cafd5648-99e8-4c28-92bb-439b1d656b15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1498.384223] env[62508]: DEBUG nova.network.neutron [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Updating instance_info_cache with network_info: [{"id": "8ba6ad85-c68d-4f7c-87dd-acd4fe19986b", "address": "fa:16:3e:3e:45:aa", "network": {"id": "ca54620c-2118-4248-ac67-90f8579e33aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-578420006-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15a9d6b8eb4e44a7a3d7fa4abe0cd5bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ba6ad85-c6", "ovs_interfaceid": "8ba6ad85-c68d-4f7c-87dd-acd4fe19986b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1498.452768] env[62508]: DEBUG oslo_vmware.api [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Task: {'id': task-1775880, 'name': ReconfigVM_Task, 'duration_secs': 0.351608} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.453180] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Reconfigured VM instance instance-00000034 to attach disk [datastore1] volume-aea26e09-c771-424f-b682-9ea0232aedf2/volume-aea26e09-c771-424f-b682-9ea0232aedf2.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1498.460410] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e9432e8-728f-4d30-ab72-712c48ea221d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.480620] env[62508]: DEBUG oslo_vmware.api [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Waiting for the task: (returnval){ [ 1498.480620] env[62508]: value = "task-1775882" [ 1498.480620] env[62508]: _type = "Task" [ 1498.480620] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.491137] env[62508]: DEBUG oslo_vmware.api [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Task: {'id': task-1775882, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.492188] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97fc4236-03f8-4b7e-be3f-fc7607b55244 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.500473] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ead054c-f1a4-4072-9b6d-05548a9b5235 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.538636] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda5b5ac-eb00-4fa3-8c10-20b574d9e32d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.548735] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc02571-80d0-443e-897a-fd765f24b8d7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.562781] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1775881, 'name': Rename_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.572719] env[62508]: DEBUG nova.compute.provider_tree [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1498.761093] env[62508]: DEBUG nova.compute.manager [req-580b539d-18fb-48c1-909f-569803be4d49 req-3410f990-937d-4dde-8c84-4f18129716b3 service nova] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Received event network-changed-8ba6ad85-c68d-4f7c-87dd-acd4fe19986b {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1498.761433] env[62508]: DEBUG nova.compute.manager [req-580b539d-18fb-48c1-909f-569803be4d49 req-3410f990-937d-4dde-8c84-4f18129716b3 service nova] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Refreshing instance network info cache due to event network-changed-8ba6ad85-c68d-4f7c-87dd-acd4fe19986b. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1498.761824] env[62508]: DEBUG oslo_concurrency.lockutils [req-580b539d-18fb-48c1-909f-569803be4d49 req-3410f990-937d-4dde-8c84-4f18129716b3 service nova] Acquiring lock "refresh_cache-e156aef5-bb56-4c17-9e7e-9419b672c9cf" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1498.805767] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Releasing lock "refresh_cache-e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1498.889537] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Releasing lock "refresh_cache-e156aef5-bb56-4c17-9e7e-9419b672c9cf" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1498.889957] env[62508]: DEBUG nova.compute.manager [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Instance network_info: |[{"id": "8ba6ad85-c68d-4f7c-87dd-acd4fe19986b", "address": "fa:16:3e:3e:45:aa", "network": {"id": "ca54620c-2118-4248-ac67-90f8579e33aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-578420006-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15a9d6b8eb4e44a7a3d7fa4abe0cd5bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ba6ad85-c6", "ovs_interfaceid": 
"8ba6ad85-c68d-4f7c-87dd-acd4fe19986b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1498.893018] env[62508]: DEBUG oslo_concurrency.lockutils [req-580b539d-18fb-48c1-909f-569803be4d49 req-3410f990-937d-4dde-8c84-4f18129716b3 service nova] Acquired lock "refresh_cache-e156aef5-bb56-4c17-9e7e-9419b672c9cf" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1498.893018] env[62508]: DEBUG nova.network.neutron [req-580b539d-18fb-48c1-909f-569803be4d49 req-3410f990-937d-4dde-8c84-4f18129716b3 service nova] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Refreshing network info cache for port 8ba6ad85-c68d-4f7c-87dd-acd4fe19986b {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1498.893018] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:45:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9d6abf71-e893-4dec-9a05-0fe7d6c0624e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ba6ad85-c68d-4f7c-87dd-acd4fe19986b', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1498.901381] env[62508]: DEBUG oslo.service.loopingcall [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1498.904845] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1498.905436] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f07d059c-8717-43ec-be11-6f9518e72c3d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.930020] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1498.930020] env[62508]: value = "task-1775883" [ 1498.930020] env[62508]: _type = "Task" [ 1498.930020] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.940591] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775883, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.995032] env[62508]: DEBUG oslo_vmware.api [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Task: {'id': task-1775882, 'name': ReconfigVM_Task, 'duration_secs': 0.146943} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.995032] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368657', 'volume_id': 'aea26e09-c771-424f-b682-9ea0232aedf2', 'name': 'volume-aea26e09-c771-424f-b682-9ea0232aedf2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f307d4d5-e877-4d0a-951c-779c1d2e573b', 'attached_at': '', 'detached_at': '', 'volume_id': 'aea26e09-c771-424f-b682-9ea0232aedf2', 'serial': 'aea26e09-c771-424f-b682-9ea0232aedf2'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1498.995032] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c0d1754e-9790-4d4e-b21b-d6672e344d3f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.001772] env[62508]: DEBUG oslo_vmware.api [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Waiting for the task: (returnval){ [ 1499.001772] env[62508]: value = "task-1775884" [ 1499.001772] env[62508]: _type = "Task" [ 1499.001772] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.016175] env[62508]: DEBUG oslo_vmware.api [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Task: {'id': task-1775884, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.071139] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1775881, 'name': Rename_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.075867] env[62508]: DEBUG nova.scheduler.client.report [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1499.163226] env[62508]: DEBUG nova.network.neutron [req-580b539d-18fb-48c1-909f-569803be4d49 req-3410f990-937d-4dde-8c84-4f18129716b3 service nova] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Updated VIF entry in instance network info cache for port 8ba6ad85-c68d-4f7c-87dd-acd4fe19986b. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1499.163647] env[62508]: DEBUG nova.network.neutron [req-580b539d-18fb-48c1-909f-569803be4d49 req-3410f990-937d-4dde-8c84-4f18129716b3 service nova] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Updating instance_info_cache with network_info: [{"id": "8ba6ad85-c68d-4f7c-87dd-acd4fe19986b", "address": "fa:16:3e:3e:45:aa", "network": {"id": "ca54620c-2118-4248-ac67-90f8579e33aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-578420006-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15a9d6b8eb4e44a7a3d7fa4abe0cd5bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ba6ad85-c6", "ovs_interfaceid": "8ba6ad85-c68d-4f7c-87dd-acd4fe19986b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.441907] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775883, 'name': CreateVM_Task, 'duration_secs': 0.476727} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.442205] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1499.443072] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.443072] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.443304] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1499.445078] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4cb735cb-825b-428a-bab4-d9e300a93ba8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.449207] 
env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1499.449207] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522e45ac-9df4-2adc-2b91-d9268d86f805" [ 1499.449207] env[62508]: _type = "Task" [ 1499.449207] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.459259] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522e45ac-9df4-2adc-2b91-d9268d86f805, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.513364] env[62508]: DEBUG oslo_vmware.api [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Task: {'id': task-1775884, 'name': Rename_Task, 'duration_secs': 0.210992} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.513867] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1499.514342] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c66a94ae-4879-47c9-ad4d-39c4ce610e84 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.522694] env[62508]: DEBUG oslo_vmware.api [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Waiting for the task: (returnval){ [ 1499.522694] env[62508]: value = "task-1775885" [ 1499.522694] env[62508]: _type = "Task" [ 1499.522694] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.531957] env[62508]: DEBUG oslo_vmware.api [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Task: {'id': task-1775885, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.566934] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1775881, 'name': Rename_Task, 'duration_secs': 1.198744} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.567275] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1499.567557] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b13809fb-2ddd-4ede-be18-d0331269b80b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.575955] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Waiting for the task: (returnval){ [ 1499.575955] env[62508]: value = "task-1775886" [ 1499.575955] env[62508]: _type = "Task" [ 1499.575955] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.582901] env[62508]: DEBUG oslo_concurrency.lockutils [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.658s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.583509] env[62508]: DEBUG nova.compute.manager [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1499.586451] env[62508]: DEBUG oslo_concurrency.lockutils [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.388s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.586705] env[62508]: DEBUG oslo_concurrency.lockutils [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.589248] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.088s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.590700] env[62508]: INFO nova.compute.claims [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1499.597130] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1775886, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.628712] env[62508]: INFO nova.scheduler.client.report [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Deleted allocations for instance fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8 [ 1499.668794] env[62508]: DEBUG oslo_concurrency.lockutils [req-580b539d-18fb-48c1-909f-569803be4d49 req-3410f990-937d-4dde-8c84-4f18129716b3 service nova] Releasing lock "refresh_cache-e156aef5-bb56-4c17-9e7e-9419b672c9cf" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.669078] env[62508]: DEBUG nova.compute.manager [req-580b539d-18fb-48c1-909f-569803be4d49 req-3410f990-937d-4dde-8c84-4f18129716b3 service nova] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Received event network-changed-971a1f15-0acf-4dbe-a120-b172a0eb9552 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1499.669255] env[62508]: DEBUG nova.compute.manager [req-580b539d-18fb-48c1-909f-569803be4d49 req-3410f990-937d-4dde-8c84-4f18129716b3 service nova] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Refreshing instance network info cache due to event network-changed-971a1f15-0acf-4dbe-a120-b172a0eb9552. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1499.669464] env[62508]: DEBUG oslo_concurrency.lockutils [req-580b539d-18fb-48c1-909f-569803be4d49 req-3410f990-937d-4dde-8c84-4f18129716b3 service nova] Acquiring lock "refresh_cache-e07ab22e-bd07-4232-abfe-c0617c0b9813" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.669605] env[62508]: DEBUG oslo_concurrency.lockutils [req-580b539d-18fb-48c1-909f-569803be4d49 req-3410f990-937d-4dde-8c84-4f18129716b3 service nova] Acquired lock "refresh_cache-e07ab22e-bd07-4232-abfe-c0617c0b9813" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.669766] env[62508]: DEBUG nova.network.neutron [req-580b539d-18fb-48c1-909f-569803be4d49 req-3410f990-937d-4dde-8c84-4f18129716b3 service nova] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Refreshing network info cache for port 971a1f15-0acf-4dbe-a120-b172a0eb9552 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1499.961951] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522e45ac-9df4-2adc-2b91-d9268d86f805, 'name': SearchDatastore_Task, 'duration_secs': 0.034128} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.962301] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.962540] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1499.962781] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.962925] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.963257] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1499.963548] env[62508]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84253e99-8af6-49d6-b747-4627ad6849de {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.975869] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1499.976093] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1499.976887] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a785cc5b-122f-4ebb-a63d-28134104a3bc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.984872] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1499.984872] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52375be9-cc69-43c9-de71-282d29dd2951" [ 1499.984872] env[62508]: _type = "Task" [ 1499.984872] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.999900] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52375be9-cc69-43c9-de71-282d29dd2951, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.035581] env[62508]: DEBUG oslo_vmware.api [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Task: {'id': task-1775885, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.090825] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1775886, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.095482] env[62508]: DEBUG nova.compute.utils [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1500.099130] env[62508]: DEBUG nova.compute.manager [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1500.099449] env[62508]: DEBUG nova.network.neutron [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1500.137948] env[62508]: DEBUG oslo_concurrency.lockutils [None req-257047dc-2e2d-4081-9863-2584802e4395 tempest-ImagesOneServerTestJSON-787373465 tempest-ImagesOneServerTestJSON-787373465-project-member] Lock "fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.195s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.172984] env[62508]: DEBUG nova.policy [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '285fedd2e7fd4d259ca7fc57c3fcbf46', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74c45615efbb425fbec8400f6d225892', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1500.324179] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb16f030-d94f-4bce-a28e-fc7d75bf3b07 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.353313] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Updating instance 'e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6' progress to 0 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1500.503372] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52375be9-cc69-43c9-de71-282d29dd2951, 'name': SearchDatastore_Task, 'duration_secs': 0.021257} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.506419] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60d89bb2-67f9-463e-86ba-3c22caebcad6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.508188] env[62508]: DEBUG nova.network.neutron [req-580b539d-18fb-48c1-909f-569803be4d49 req-3410f990-937d-4dde-8c84-4f18129716b3 service nova] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Updated VIF entry in instance network info cache for port 971a1f15-0acf-4dbe-a120-b172a0eb9552. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1500.508537] env[62508]: DEBUG nova.network.neutron [req-580b539d-18fb-48c1-909f-569803be4d49 req-3410f990-937d-4dde-8c84-4f18129716b3 service nova] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Updating instance_info_cache with network_info: [{"id": "971a1f15-0acf-4dbe-a120-b172a0eb9552", "address": "fa:16:3e:bc:21:9f", "network": {"id": "2e10157c-8592-41c4-8540-9696923dc9ca", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1194878546-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "51842f3e9f83452789923afbafd40bc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f52a458-d157-48a3-b4e2-b8cc0779afe2", "external-id": "nsx-vlan-transportzone-403", "segmentation_id": 403, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap971a1f15-0a", "ovs_interfaceid": "971a1f15-0acf-4dbe-a120-b172a0eb9552", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.514364] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1500.514364] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52299234-cc84-aca1-a98a-8114ac69046f" [ 1500.514364] env[62508]: _type = "Task" [ 1500.514364] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.528302] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52299234-cc84-aca1-a98a-8114ac69046f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.539258] env[62508]: DEBUG oslo_vmware.api [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Task: {'id': task-1775885, 'name': PowerOnVM_Task, 'duration_secs': 0.584038} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.539602] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1500.539751] env[62508]: INFO nova.compute.manager [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Took 6.20 seconds to spawn the instance on the hypervisor. [ 1500.539929] env[62508]: DEBUG nova.compute.manager [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1500.540880] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1302404f-32d3-4b2d-a7cf-713c1f03e606 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.587701] env[62508]: DEBUG oslo_vmware.api [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1775886, 'name': PowerOnVM_Task, 'duration_secs': 0.841842} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.588034] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1500.588258] env[62508]: INFO nova.compute.manager [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Took 10.74 seconds to spawn the instance on the hypervisor. [ 1500.588444] env[62508]: DEBUG nova.compute.manager [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1500.589902] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0938c751-2d8e-4e3b-a9a7-e58c002831ad {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.602479] env[62508]: DEBUG nova.compute.manager [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1500.866189] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1500.866987] env[62508]: DEBUG nova.network.neutron [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Successfully created port: 32066ea0-906e-445b-89fa-625dd3384edf {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1500.869086] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3201171d-4071-49f8-83be-6ce1a379cf3a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.878915] env[62508]: DEBUG oslo_vmware.api [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1500.878915] env[62508]: value = "task-1775887" [ 1500.878915] env[62508]: _type = "Task" [ 1500.878915] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.892971] env[62508]: DEBUG oslo_vmware.api [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775887, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.012555] env[62508]: DEBUG oslo_concurrency.lockutils [req-580b539d-18fb-48c1-909f-569803be4d49 req-3410f990-937d-4dde-8c84-4f18129716b3 service nova] Releasing lock "refresh_cache-e07ab22e-bd07-4232-abfe-c0617c0b9813" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1501.027630] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52299234-cc84-aca1-a98a-8114ac69046f, 'name': SearchDatastore_Task, 'duration_secs': 0.015797} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.030373] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1501.030646] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] e156aef5-bb56-4c17-9e7e-9419b672c9cf/e156aef5-bb56-4c17-9e7e-9419b672c9cf.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1501.031149] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8f532a05-e6d0-4d14-ad7d-12863c23968e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.040069] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1501.040069] env[62508]: value = "task-1775888" [ 1501.040069] env[62508]: _type = "Task" [ 1501.040069] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.057242] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775888, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.062557] env[62508]: INFO nova.compute.manager [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Took 41.63 seconds to build instance. [ 1501.084569] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0711cd1e-e6c1-4a97-a690-853fd2ccc09c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.093395] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14223680-01b6-4f8d-8825-b418a3a3ce8a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.137570] env[62508]: INFO nova.compute.manager [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Took 45.39 seconds to build instance. 
[ 1501.140027] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9265de4c-0a9a-404d-915f-27642579fd4f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.149746] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee3741d-04d2-4f14-824a-408215f099df {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.173584] env[62508]: DEBUG nova.compute.provider_tree [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1501.398290] env[62508]: DEBUG oslo_vmware.api [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775887, 'name': PowerOffVM_Task, 'duration_secs': 0.384991} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.401022] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1501.401022] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Updating instance 'e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6' progress to 17 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1501.554123] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775888, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.568019] env[62508]: DEBUG oslo_concurrency.lockutils [None req-511f5ac6-06b2-44d9-af31-e90c2de3d088 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Lock "f307d4d5-e877-4d0a-951c-779c1d2e573b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.145s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.645462] env[62508]: DEBUG nova.compute.manager [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1501.648118] env[62508]: DEBUG oslo_concurrency.lockutils [None req-df6a2d16-cd5a-4c89-8c30-724805c976d2 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Lock "6afa4e73-64b4-4b10-b598-433f0c22ecb3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.024s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.678156] env[62508]: DEBUG nova.scheduler.client.report [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1501.689231] env[62508]: DEBUG nova.virt.hardware [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1501.689231] env[62508]: DEBUG nova.virt.hardware [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1501.689231] env[62508]: DEBUG nova.virt.hardware [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1501.689231] env[62508]: DEBUG nova.virt.hardware [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1501.689231] env[62508]: DEBUG nova.virt.hardware [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] 
Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1501.689231] env[62508]: DEBUG nova.virt.hardware [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1501.689231] env[62508]: DEBUG nova.virt.hardware [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1501.689231] env[62508]: DEBUG nova.virt.hardware [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1501.689231] env[62508]: DEBUG nova.virt.hardware [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1501.689684] env[62508]: DEBUG nova.virt.hardware [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1501.689684] env[62508]: DEBUG nova.virt.hardware [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1501.690434] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f62d99-ef7a-4973-b7b0-d68db7d3e1c6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.702476] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d31eae-bd09-4518-a03c-def8b5ceab36 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.910611] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1501.910878] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1501.911074] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1501.911288] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1501.911435] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1501.911580] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1501.911778] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1501.911935] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1501.912374] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1501.912374] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1501.912528] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Sorted desired topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1501.917729] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25581e3f-bcb9-49b2-9478-25fecaa592c5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.936369] env[62508]: DEBUG oslo_vmware.api [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1501.936369] env[62508]: value = "task-1775889" [ 1501.936369] env[62508]: _type = "Task" [ 1501.936369] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.947811] env[62508]: DEBUG oslo_vmware.api [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775889, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.051169] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775888, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.847356} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.051502] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] e156aef5-bb56-4c17-9e7e-9419b672c9cf/e156aef5-bb56-4c17-9e7e-9419b672c9cf.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1502.051779] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1502.052103] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-93ffffcc-ba18-4ac1-a492-2be3ac640d2a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.060933] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1502.060933] env[62508]: value = "task-1775890" [ 1502.060933] env[62508]: _type = "Task" [ 1502.060933] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.071922] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775890, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.195631] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.606s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.196527] env[62508]: DEBUG nova.compute.manager [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1502.201150] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.411s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1502.204915] env[62508]: INFO nova.compute.claims [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1502.450213] env[62508]: DEBUG oslo_vmware.api [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775889, 'name': ReconfigVM_Task, 'duration_secs': 0.297133} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.450564] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Updating instance 'e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6' progress to 33 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1502.575556] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775890, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098465} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.575837] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1502.577679] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6291a263-8bbd-4b55-a597-495c0450d424 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.612623] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] e156aef5-bb56-4c17-9e7e-9419b672c9cf/e156aef5-bb56-4c17-9e7e-9419b672c9cf.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1502.613144] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f01c513a-feff-48a9-b265-2af55da13a8f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.632513] env[62508]: DEBUG nova.compute.manager [req-077c102a-1b4d-4d07-8730-aa17408ca2ed req-93c58209-a576-4b6f-b6fd-541c99dbd055 service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Received event network-changed-d993966a-9d82-49a0-9e8a-f1835407ecad {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1502.632669] env[62508]: DEBUG nova.compute.manager [req-077c102a-1b4d-4d07-8730-aa17408ca2ed req-93c58209-a576-4b6f-b6fd-541c99dbd055 service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Refreshing instance network info cache due to event network-changed-d993966a-9d82-49a0-9e8a-f1835407ecad. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1502.632889] env[62508]: DEBUG oslo_concurrency.lockutils [req-077c102a-1b4d-4d07-8730-aa17408ca2ed req-93c58209-a576-4b6f-b6fd-541c99dbd055 service nova] Acquiring lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1502.633100] env[62508]: DEBUG oslo_concurrency.lockutils [req-077c102a-1b4d-4d07-8730-aa17408ca2ed req-93c58209-a576-4b6f-b6fd-541c99dbd055 service nova] Acquired lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1502.633280] env[62508]: DEBUG nova.network.neutron [req-077c102a-1b4d-4d07-8730-aa17408ca2ed req-93c58209-a576-4b6f-b6fd-541c99dbd055 service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Refreshing network info cache for port d993966a-9d82-49a0-9e8a-f1835407ecad {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1502.643318] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1502.643318] env[62508]: value = "task-1775891" [ 1502.643318] env[62508]: _type = "Task" [ 1502.643318] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.658591] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775891, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.709965] env[62508]: DEBUG nova.compute.utils [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1502.714656] env[62508]: DEBUG nova.compute.manager [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1502.714841] env[62508]: DEBUG nova.network.neutron [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1502.788770] env[62508]: DEBUG nova.policy [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f3c96cc4a58a4321837c1ab8badc686a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0156fba01363470eaa9771d5f296f730', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1502.887589] env[62508]: DEBUG nova.compute.manager [req-2a622c73-fe41-4e78-bee9-f31cc72a5ab5 req-e633c258-31d1-4c36-9edf-4daf8bf1d7b1 service nova] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Received event network-changed-c34d40a3-404e-4f74-8cdc-ba8732e8c103 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1502.887802] env[62508]: DEBUG nova.compute.manager [req-2a622c73-fe41-4e78-bee9-f31cc72a5ab5 req-e633c258-31d1-4c36-9edf-4daf8bf1d7b1 service nova] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Refreshing instance network info cache due to event network-changed-c34d40a3-404e-4f74-8cdc-ba8732e8c103. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1502.888031] env[62508]: DEBUG oslo_concurrency.lockutils [req-2a622c73-fe41-4e78-bee9-f31cc72a5ab5 req-e633c258-31d1-4c36-9edf-4daf8bf1d7b1 service nova] Acquiring lock "refresh_cache-f307d4d5-e877-4d0a-951c-779c1d2e573b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1502.888704] env[62508]: DEBUG oslo_concurrency.lockutils [req-2a622c73-fe41-4e78-bee9-f31cc72a5ab5 req-e633c258-31d1-4c36-9edf-4daf8bf1d7b1 service nova] Acquired lock "refresh_cache-f307d4d5-e877-4d0a-951c-779c1d2e573b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1502.889161] env[62508]: DEBUG nova.network.neutron [req-2a622c73-fe41-4e78-bee9-f31cc72a5ab5 req-e633c258-31d1-4c36-9edf-4daf8bf1d7b1 service nova] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Refreshing network info cache for port c34d40a3-404e-4f74-8cdc-ba8732e8c103 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1502.958631] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:10:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='192acf26-1962-4b3a-b461-037bee820f70',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1531657240',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1502.958877] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1502.959042] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1502.959649] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1502.959859] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1502.960032] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1502.960356] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1502.960448] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1502.960599] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1502.960758] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1502.962019] env[62508]: DEBUG nova.virt.hardware [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1502.969552] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Reconfiguring VM instance instance-0000002b to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1502.970727] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a3350b5-98f9-44dd-938c-637a0d4fc61a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.996570] env[62508]: DEBUG oslo_vmware.api [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1502.996570] env[62508]: value = "task-1775892" [ 1502.996570] env[62508]: _type = "Task" [ 1502.996570] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.009628] env[62508]: DEBUG oslo_vmware.api [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775892, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.162762] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775891, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.216884] env[62508]: DEBUG nova.compute.manager [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1503.347823] env[62508]: DEBUG nova.network.neutron [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Successfully updated port: 32066ea0-906e-445b-89fa-625dd3384edf {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1503.399142] env[62508]: DEBUG nova.network.neutron [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Successfully created port: 179c7569-ad66-43e9-976d-5c0146e5f13f {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1503.513795] env[62508]: DEBUG oslo_vmware.api [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775892, 'name': ReconfigVM_Task, 'duration_secs': 0.246482} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.517504] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Reconfigured VM instance instance-0000002b to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1503.521642] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c39d1f2-72a7-42c5-816e-ba4154c524d5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.553312] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6/e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1503.557040] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ed43efe-e861-4ade-b85a-36dabbdd02d5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.587039] env[62508]: DEBUG oslo_vmware.api [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1503.587039] env[62508]: value = "task-1775893" [ 1503.587039] env[62508]: _type = "Task" [ 1503.587039] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.598426] env[62508]: DEBUG oslo_vmware.api [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775893, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.661615] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775891, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.719015] env[62508]: DEBUG nova.network.neutron [req-077c102a-1b4d-4d07-8730-aa17408ca2ed req-93c58209-a576-4b6f-b6fd-541c99dbd055 service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Updated VIF entry in instance network info cache for port d993966a-9d82-49a0-9e8a-f1835407ecad. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1503.719428] env[62508]: DEBUG nova.network.neutron [req-077c102a-1b4d-4d07-8730-aa17408ca2ed req-93c58209-a576-4b6f-b6fd-541c99dbd055 service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Updating instance_info_cache with network_info: [{"id": "d993966a-9d82-49a0-9e8a-f1835407ecad", "address": "fa:16:3e:62:45:86", "network": {"id": "a60137fc-6bcc-47ca-a062-ff0c72ec6801", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1168729734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2b18b2f1d6d439382b9e0fc89b2a3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1712475b-e1be-49e0-9a18-febd305c90ad", "external-id": "nsx-vlan-transportzone-531", "segmentation_id": 531, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd993966a-9d", "ovs_interfaceid": "d993966a-9d82-49a0-9e8a-f1835407ecad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1503.833198] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c21256b9-0b0f-4298-92a6-af610f5eedbd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.843179] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc740312-edb2-4fcd-bf7b-d8a2c9daefb0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.882793] env[62508]: DEBUG oslo_concurrency.lockutils [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "refresh_cache-38289797-ecf5-4207-a164-d70228e4411d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1503.882793] env[62508]: DEBUG oslo_concurrency.lockutils [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired lock "refresh_cache-38289797-ecf5-4207-a164-d70228e4411d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1503.882793] env[62508]: DEBUG nova.network.neutron [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1503.885822] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70053637-2327-44aa-a326-7992114a12eb 
{{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.895044] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51490b2f-1dfe-4b4c-bba9-2566402c8984 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.911689] env[62508]: DEBUG nova.compute.provider_tree [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1503.948843] env[62508]: DEBUG nova.network.neutron [req-2a622c73-fe41-4e78-bee9-f31cc72a5ab5 req-e633c258-31d1-4c36-9edf-4daf8bf1d7b1 service nova] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Updated VIF entry in instance network info cache for port c34d40a3-404e-4f74-8cdc-ba8732e8c103. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1503.951175] env[62508]: DEBUG nova.network.neutron [req-2a622c73-fe41-4e78-bee9-f31cc72a5ab5 req-e633c258-31d1-4c36-9edf-4daf8bf1d7b1 service nova] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Updating instance_info_cache with network_info: [{"id": "c34d40a3-404e-4f74-8cdc-ba8732e8c103", "address": "fa:16:3e:65:8e:bb", "network": {"id": "5abc1d3a-649b-4566-af47-32dd35b19f62", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1555036678-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef2b6a7e3fe1491d8be03c1f6d906b2f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc34d40a3-40", "ovs_interfaceid": "c34d40a3-404e-4f74-8cdc-ba8732e8c103", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1504.105316] env[62508]: DEBUG oslo_vmware.api [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775893, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.156213] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775891, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.222394] env[62508]: DEBUG oslo_concurrency.lockutils [req-077c102a-1b4d-4d07-8730-aa17408ca2ed req-93c58209-a576-4b6f-b6fd-541c99dbd055 service nova] Releasing lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1504.234218] env[62508]: DEBUG nova.compute.manager [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1504.262762] env[62508]: DEBUG nova.virt.hardware [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1504.263073] env[62508]: DEBUG nova.virt.hardware [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1504.263478] env[62508]: DEBUG nova.virt.hardware [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1504.263478] env[62508]: DEBUG nova.virt.hardware [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1504.263660] env[62508]: DEBUG nova.virt.hardware [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1504.264164] env[62508]: DEBUG nova.virt.hardware [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1504.264164] env[62508]: DEBUG 
nova.virt.hardware [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1504.264317] env[62508]: DEBUG nova.virt.hardware [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1504.264657] env[62508]: DEBUG nova.virt.hardware [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1504.264864] env[62508]: DEBUG nova.virt.hardware [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1504.265224] env[62508]: DEBUG nova.virt.hardware [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1504.266216] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee34df2-7198-430b-9c2a-27f9b2bcde6e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.278690] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d077d00-eced-40e9-a394-15777beee55b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.415880] env[62508]: DEBUG nova.scheduler.client.report [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1504.438311] env[62508]: DEBUG nova.network.neutron [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1504.452593] env[62508]: DEBUG oslo_concurrency.lockutils [req-2a622c73-fe41-4e78-bee9-f31cc72a5ab5 req-e633c258-31d1-4c36-9edf-4daf8bf1d7b1 service nova] Releasing lock "refresh_cache-f307d4d5-e877-4d0a-951c-779c1d2e573b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1504.600245] env[62508]: DEBUG oslo_vmware.api [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775893, 'name': ReconfigVM_Task, 'duration_secs': 0.774077} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.600748] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Reconfigured VM instance instance-0000002b to attach disk [datastore1] e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6/e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1504.601163] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Updating instance 'e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6' progress to 50 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1504.658365] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775891, 'name': ReconfigVM_Task, 'duration_secs': 1.619524} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.659651] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Reconfigured VM instance instance-00000035 to attach disk [datastore1] e156aef5-bb56-4c17-9e7e-9419b672c9cf/e156aef5-bb56-4c17-9e7e-9419b672c9cf.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1504.659651] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3156e560-bd20-4f47-a738-078e84722666 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.664494] env[62508]: DEBUG nova.compute.manager [req-2ae1ca73-f74c-48aa-ae43-028afa1e9d0c req-875e61af-f96f-4ed5-bc00-d610cd8108cb service nova] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Received event network-vif-plugged-32066ea0-906e-445b-89fa-625dd3384edf {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1504.664697] env[62508]: DEBUG oslo_concurrency.lockutils [req-2ae1ca73-f74c-48aa-ae43-028afa1e9d0c req-875e61af-f96f-4ed5-bc00-d610cd8108cb service nova] Acquiring lock "38289797-ecf5-4207-a164-d70228e4411d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.664913] env[62508]: DEBUG oslo_concurrency.lockutils [req-2ae1ca73-f74c-48aa-ae43-028afa1e9d0c req-875e61af-f96f-4ed5-bc00-d610cd8108cb service nova] Lock "38289797-ecf5-4207-a164-d70228e4411d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.665384] env[62508]: DEBUG oslo_concurrency.lockutils [req-2ae1ca73-f74c-48aa-ae43-028afa1e9d0c req-875e61af-f96f-4ed5-bc00-d610cd8108cb service nova] Lock "38289797-ecf5-4207-a164-d70228e4411d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1504.665559] env[62508]: DEBUG nova.compute.manager [req-2ae1ca73-f74c-48aa-ae43-028afa1e9d0c req-875e61af-f96f-4ed5-bc00-d610cd8108cb service nova] [instance: 38289797-ecf5-4207-a164-d70228e4411d] No waiting events found dispatching network-vif-plugged-32066ea0-906e-445b-89fa-625dd3384edf {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1504.665715] env[62508]: WARNING nova.compute.manager [req-2ae1ca73-f74c-48aa-ae43-028afa1e9d0c req-875e61af-f96f-4ed5-bc00-d610cd8108cb service nova] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Received unexpected event network-vif-plugged-32066ea0-906e-445b-89fa-625dd3384edf for instance with vm_state building and task_state spawning. 
[ 1504.665867] env[62508]: DEBUG nova.compute.manager [req-2ae1ca73-f74c-48aa-ae43-028afa1e9d0c req-875e61af-f96f-4ed5-bc00-d610cd8108cb service nova] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Received event network-changed-32066ea0-906e-445b-89fa-625dd3384edf {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1504.666049] env[62508]: DEBUG nova.compute.manager [req-2ae1ca73-f74c-48aa-ae43-028afa1e9d0c req-875e61af-f96f-4ed5-bc00-d610cd8108cb service nova] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Refreshing instance network info cache due to event network-changed-32066ea0-906e-445b-89fa-625dd3384edf. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1504.666187] env[62508]: DEBUG oslo_concurrency.lockutils [req-2ae1ca73-f74c-48aa-ae43-028afa1e9d0c req-875e61af-f96f-4ed5-bc00-d610cd8108cb service nova] Acquiring lock "refresh_cache-38289797-ecf5-4207-a164-d70228e4411d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1504.669573] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1504.669573] env[62508]: value = "task-1775894" [ 1504.669573] env[62508]: _type = "Task" [ 1504.669573] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.679785] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775894, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.724657] env[62508]: DEBUG nova.network.neutron [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Updating instance_info_cache with network_info: [{"id": "32066ea0-906e-445b-89fa-625dd3384edf", "address": "fa:16:3e:77:6b:bb", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32066ea0-90", "ovs_interfaceid": "32066ea0-906e-445b-89fa-625dd3384edf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1504.923581] env[62508]: DEBUG oslo_concurrency.lockutils [None 
req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.720s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1504.923581] env[62508]: DEBUG nova.compute.manager [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1504.926024] env[62508]: DEBUG oslo_concurrency.lockutils [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.071s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.926440] env[62508]: DEBUG oslo_concurrency.lockutils [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1504.929111] env[62508]: DEBUG oslo_concurrency.lockutils [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.622s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.929445] env[62508]: DEBUG nova.objects.instance [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lazy-loading 'resources' on Instance uuid 63fca45d-5922-4a14-9936-30070c349f8e {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1504.990382] env[62508]: INFO nova.scheduler.client.report [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Deleted allocations for instance 9a3ef326-0fbf-4fd2-bb5e-3009bf661381 [ 1505.110212] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96245280-606a-4f0b-87e3-64ae798574a9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.135139] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f39ab3a-17f6-4462-9747-5cb5fcaf259c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.140182] env[62508]: DEBUG nova.compute.manager [req-3d9c2d61-52c5-4527-9461-6431a59eeb14 req-c12bd768-e11d-48ab-afeb-8128234bd6eb service nova] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Received event network-vif-plugged-179c7569-ad66-43e9-976d-5c0146e5f13f 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1505.140507] env[62508]: DEBUG oslo_concurrency.lockutils [req-3d9c2d61-52c5-4527-9461-6431a59eeb14 req-c12bd768-e11d-48ab-afeb-8128234bd6eb service nova] Acquiring lock "f456dd83-6350-46b2-b06c-41dc5c477358-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1505.140806] env[62508]: DEBUG oslo_concurrency.lockutils [req-3d9c2d61-52c5-4527-9461-6431a59eeb14 req-c12bd768-e11d-48ab-afeb-8128234bd6eb service nova] Lock "f456dd83-6350-46b2-b06c-41dc5c477358-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1505.140973] env[62508]: DEBUG oslo_concurrency.lockutils [req-3d9c2d61-52c5-4527-9461-6431a59eeb14 req-c12bd768-e11d-48ab-afeb-8128234bd6eb service nova] Lock "f456dd83-6350-46b2-b06c-41dc5c477358-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1505.141958] env[62508]: DEBUG nova.compute.manager [req-3d9c2d61-52c5-4527-9461-6431a59eeb14 req-c12bd768-e11d-48ab-afeb-8128234bd6eb service nova] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] No waiting events found dispatching network-vif-plugged-179c7569-ad66-43e9-976d-5c0146e5f13f {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1505.142194] env[62508]: WARNING nova.compute.manager [req-3d9c2d61-52c5-4527-9461-6431a59eeb14 req-c12bd768-e11d-48ab-afeb-8128234bd6eb service nova] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Received unexpected event network-vif-plugged-179c7569-ad66-43e9-976d-5c0146e5f13f for instance with vm_state building and task_state spawning. [ 1505.162516] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Updating instance 'e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6' progress to 67 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1505.181695] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775894, 'name': Rename_Task, 'duration_secs': 0.338995} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.181981] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1505.182280] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d386358-fedc-4774-b480-4cfd831b27b1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.191514] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1505.191514] env[62508]: value = "task-1775895" [ 1505.191514] env[62508]: _type = "Task" [ 1505.191514] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.207642] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775895, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.227591] env[62508]: DEBUG oslo_concurrency.lockutils [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Releasing lock "refresh_cache-38289797-ecf5-4207-a164-d70228e4411d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1505.227933] env[62508]: DEBUG nova.compute.manager [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Instance network_info: |[{"id": "32066ea0-906e-445b-89fa-625dd3384edf", "address": "fa:16:3e:77:6b:bb", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32066ea0-90", "ovs_interfaceid": "32066ea0-906e-445b-89fa-625dd3384edf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1505.228564] env[62508]: DEBUG oslo_concurrency.lockutils [req-2ae1ca73-f74c-48aa-ae43-028afa1e9d0c 
req-875e61af-f96f-4ed5-bc00-d610cd8108cb service nova] Acquired lock "refresh_cache-38289797-ecf5-4207-a164-d70228e4411d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1505.228792] env[62508]: DEBUG nova.network.neutron [req-2ae1ca73-f74c-48aa-ae43-028afa1e9d0c req-875e61af-f96f-4ed5-bc00-d610cd8108cb service nova] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Refreshing network info cache for port 32066ea0-906e-445b-89fa-625dd3384edf {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1505.229967] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:6b:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '304be4f7-4e36-4468-9ef4-e457341cef18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '32066ea0-906e-445b-89fa-625dd3384edf', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1505.239310] env[62508]: DEBUG oslo.service.loopingcall [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1505.240360] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1505.240605] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8a484a7a-d13b-44cf-82bd-7b7e788970e2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.263873] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1505.263873] env[62508]: value = "task-1775896" [ 1505.263873] env[62508]: _type = "Task" [ 1505.263873] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.280520] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775896, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.284769] env[62508]: DEBUG nova.network.neutron [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Successfully updated port: 179c7569-ad66-43e9-976d-5c0146e5f13f {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1505.434673] env[62508]: DEBUG nova.compute.utils [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1505.438496] env[62508]: DEBUG nova.compute.manager [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1505.438609] env[62508]: DEBUG nova.network.neutron [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1505.494148] env[62508]: DEBUG nova.policy [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e23c4d8844d4273a264b0dcc148d251', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '27e6f55b56be40d2a619f0119aefb2ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1505.499209] env[62508]: DEBUG oslo_concurrency.lockutils [None req-426fff85-3a66-45da-ab73-7d8f9cf9afd1 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "9a3ef326-0fbf-4fd2-bb5e-3009bf661381" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.668s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1505.705228] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775895, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.713659] env[62508]: DEBUG nova.network.neutron [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Port cafd5648-99e8-4c28-92bb-439b1d656b15 binding to destination host cpu-1 is already ACTIVE {{(pid=62508) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1505.777776] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775896, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.792469] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "refresh_cache-f456dd83-6350-46b2-b06c-41dc5c477358" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1505.792469] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "refresh_cache-f456dd83-6350-46b2-b06c-41dc5c477358" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1505.792469] env[62508]: DEBUG nova.network.neutron [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1505.851223] env[62508]: DEBUG nova.network.neutron [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Successfully created port: 458dc468-1ae9-4f09-b0e2-4c866362fb80 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1505.917380] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870ffc5c-58eb-4b36-b6b7-e4eff86288cd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.928558] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a16e71-2f2b-47de-b14f-5ffb833b1c4f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.942428] env[62508]: DEBUG nova.compute.manager [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1505.980751] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496aa8fc-ad2b-413c-80de-f440097c1fa1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.992684] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d24b0e01-e6d1-42b0-867a-a8505fd4edf6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.010608] env[62508]: DEBUG nova.compute.provider_tree [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1506.156733] env[62508]: DEBUG nova.network.neutron [req-2ae1ca73-f74c-48aa-ae43-028afa1e9d0c req-875e61af-f96f-4ed5-bc00-d610cd8108cb service nova] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Updated VIF entry in instance network info cache for port 32066ea0-906e-445b-89fa-625dd3384edf. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1506.157112] env[62508]: DEBUG nova.network.neutron [req-2ae1ca73-f74c-48aa-ae43-028afa1e9d0c req-875e61af-f96f-4ed5-bc00-d610cd8108cb service nova] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Updating instance_info_cache with network_info: [{"id": "32066ea0-906e-445b-89fa-625dd3384edf", "address": "fa:16:3e:77:6b:bb", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32066ea0-90", "ovs_interfaceid": "32066ea0-906e-445b-89fa-625dd3384edf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1506.206113] env[62508]: DEBUG oslo_vmware.api [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775895, 'name': PowerOnVM_Task, 'duration_secs': 0.96239} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.206276] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1506.207266] env[62508]: INFO nova.compute.manager [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Took 10.01 seconds to spawn the instance on the hypervisor. [ 1506.207266] env[62508]: DEBUG nova.compute.manager [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1506.207517] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5373400-8803-4674-8492-dc6465f50747 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.250964] env[62508]: DEBUG oslo_vmware.rw_handles [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f5752c-ada9-6ece-ff29-58016e2fb79e/disk-0.vmdk. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1506.252241] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b4440c-6e69-4881-9ff5-ded91a57a4d8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.260672] env[62508]: DEBUG oslo_vmware.rw_handles [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f5752c-ada9-6ece-ff29-58016e2fb79e/disk-0.vmdk is in state: ready. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1506.260856] env[62508]: ERROR oslo_vmware.rw_handles [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f5752c-ada9-6ece-ff29-58016e2fb79e/disk-0.vmdk due to incomplete transfer. [ 1506.261139] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-cc2d757f-3092-484a-847b-8629e6e1df99 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.272046] env[62508]: DEBUG oslo_vmware.rw_handles [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f5752c-ada9-6ece-ff29-58016e2fb79e/disk-0.vmdk. 
{{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1506.272046] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Uploaded image 3b90f393-c75c-47dc-8dbc-3f3647e17ab7 to the Glance image server {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1506.274621] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Destroying the VM {{(pid=62508) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1506.275418] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7d5e8745-feda-49d2-9d2d-d75c899b3a76 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.282928] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775896, 'name': CreateVM_Task, 'duration_secs': 0.896154} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.283161] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1506.284554] env[62508]: DEBUG oslo_concurrency.lockutils [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1506.284554] env[62508]: DEBUG oslo_concurrency.lockutils [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1506.284554] env[62508]: DEBUG oslo_concurrency.lockutils [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1506.285062] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c62b7b23-a391-4976-9ac1-0008e615acc9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.291995] env[62508]: DEBUG oslo_vmware.api [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1506.291995] env[62508]: value = "task-1775897" [ 1506.291995] env[62508]: _type = "Task" [ 1506.291995] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.302486] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1506.302486] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52222cc9-4609-4a00-5704-b49f814eb5f4" [ 1506.302486] env[62508]: _type = "Task" [ 1506.302486] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.311941] env[62508]: DEBUG oslo_vmware.api [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775897, 'name': Destroy_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.318684] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52222cc9-4609-4a00-5704-b49f814eb5f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.341711] env[62508]: DEBUG nova.network.neutron [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1506.517626] env[62508]: DEBUG nova.scheduler.client.report [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1506.522409] env[62508]: DEBUG nova.network.neutron [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Updating instance_info_cache with network_info: [{"id": "179c7569-ad66-43e9-976d-5c0146e5f13f", "address": "fa:16:3e:5a:bf:4f", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap179c7569-ad", "ovs_interfaceid": "179c7569-ad66-43e9-976d-5c0146e5f13f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1506.661653] env[62508]: DEBUG oslo_concurrency.lockutils [req-2ae1ca73-f74c-48aa-ae43-028afa1e9d0c req-875e61af-f96f-4ed5-bc00-d610cd8108cb service nova] Releasing lock "refresh_cache-38289797-ecf5-4207-a164-d70228e4411d" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1506.751113] env[62508]: INFO nova.compute.manager [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Took 40.44 seconds to build instance. [ 1506.756238] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.756238] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.756238] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1506.806115] env[62508]: DEBUG oslo_vmware.api [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775897, 'name': Destroy_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.816138] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52222cc9-4609-4a00-5704-b49f814eb5f4, 'name': SearchDatastore_Task, 'duration_secs': 0.062037} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.816502] env[62508]: DEBUG oslo_concurrency.lockutils [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1506.816794] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1506.817094] env[62508]: DEBUG oslo_concurrency.lockutils [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1506.817277] env[62508]: DEBUG oslo_concurrency.lockutils [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1506.817488] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1506.817776] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2fda4251-d7d9-4258-88a8-65efdcf7b196 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.831659] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1506.831886] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1506.832718] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02d4ea30-71da-4e7c-8803-c0174573f7b5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.838572] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1506.838572] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528f2513-9180-a194-f442-b0b5575ef234" [ 1506.838572] env[62508]: _type = "Task" [ 1506.838572] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.847571] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528f2513-9180-a194-f442-b0b5575ef234, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.987723] env[62508]: DEBUG nova.compute.manager [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1507.010856] env[62508]: DEBUG nova.virt.hardware [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1507.011136] env[62508]: DEBUG nova.virt.hardware [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1507.011300] env[62508]: DEBUG nova.virt.hardware [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1507.011481] env[62508]: DEBUG nova.virt.hardware [None 
req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1507.011628] env[62508]: DEBUG nova.virt.hardware [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1507.011778] env[62508]: DEBUG nova.virt.hardware [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1507.011982] env[62508]: DEBUG nova.virt.hardware [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1507.012523] env[62508]: DEBUG nova.virt.hardware [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1507.012811] env[62508]: DEBUG nova.virt.hardware [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1507.013051] env[62508]: DEBUG nova.virt.hardware [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1507.013357] env[62508]: DEBUG nova.virt.hardware [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1507.014363] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1802829-f8b9-4121-b91e-5bc412d29908 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.024219] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e686a2c-eb70-41fa-b5b6-abc2e153688f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.028632] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing 
lock "refresh_cache-f456dd83-6350-46b2-b06c-41dc5c477358" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1507.028994] env[62508]: DEBUG nova.compute.manager [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Instance network_info: |[{"id": "179c7569-ad66-43e9-976d-5c0146e5f13f", "address": "fa:16:3e:5a:bf:4f", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap179c7569-ad", "ovs_interfaceid": "179c7569-ad66-43e9-976d-5c0146e5f13f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1507.029728] env[62508]: DEBUG oslo_concurrency.lockutils [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.101s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.032686] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:bf:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '179c7569-ad66-43e9-976d-5c0146e5f13f', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1507.041475] env[62508]: DEBUG oslo.service.loopingcall [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1507.041790] env[62508]: DEBUG oslo_concurrency.lockutils [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.794s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1507.043906] env[62508]: INFO nova.compute.claims [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1507.047383] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1507.049178] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cef66295-d428-4d96-a3ac-d708c32f4b81 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.079375] env[62508]: INFO nova.scheduler.client.report [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Deleted allocations for instance 63fca45d-5922-4a14-9936-30070c349f8e [ 1507.093923] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1507.093923] env[62508]: value = "task-1775898" [ 1507.093923] env[62508]: _type = "Task" [ 1507.093923] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.105057] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775898, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.262893] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5acc9d81-6459-4845-a16e-92e477214111 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "e156aef5-bb56-4c17-9e7e-9419b672c9cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.684s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.286235] env[62508]: DEBUG nova.compute.manager [req-843a383a-cb71-4688-9966-6593aafd3611 req-36c2d500-44f3-4c6f-a50e-af4c81ea34c0 service nova] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Received event network-changed-179c7569-ad66-43e9-976d-5c0146e5f13f {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1507.286460] env[62508]: DEBUG nova.compute.manager [req-843a383a-cb71-4688-9966-6593aafd3611 req-36c2d500-44f3-4c6f-a50e-af4c81ea34c0 service nova] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Refreshing instance network info cache due to event network-changed-179c7569-ad66-43e9-976d-5c0146e5f13f. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1507.286692] env[62508]: DEBUG oslo_concurrency.lockutils [req-843a383a-cb71-4688-9966-6593aafd3611 req-36c2d500-44f3-4c6f-a50e-af4c81ea34c0 service nova] Acquiring lock "refresh_cache-f456dd83-6350-46b2-b06c-41dc5c477358" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1507.286838] env[62508]: DEBUG oslo_concurrency.lockutils [req-843a383a-cb71-4688-9966-6593aafd3611 req-36c2d500-44f3-4c6f-a50e-af4c81ea34c0 service nova] Acquired lock "refresh_cache-f456dd83-6350-46b2-b06c-41dc5c477358" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1507.287032] env[62508]: DEBUG nova.network.neutron [req-843a383a-cb71-4688-9966-6593aafd3611 req-36c2d500-44f3-4c6f-a50e-af4c81ea34c0 service nova] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Refreshing network info cache for port 179c7569-ad66-43e9-976d-5c0146e5f13f {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1507.306390] env[62508]: DEBUG oslo_vmware.api [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775897, 'name': Destroy_Task, 'duration_secs': 1.014856} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.307133] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Destroyed the VM [ 1507.307396] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Deleting Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1507.307657] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a93ca3b0-524a-41ea-9954-2102b940c204 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.315753] env[62508]: DEBUG oslo_vmware.api [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1507.315753] env[62508]: value = "task-1775899" [ 1507.315753] env[62508]: _type = "Task" [ 1507.315753] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.325577] env[62508]: DEBUG oslo_vmware.api [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775899, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.353199] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528f2513-9180-a194-f442-b0b5575ef234, 'name': SearchDatastore_Task, 'duration_secs': 0.028975} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.354181] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-657863c9-6cf9-47bd-927a-c5edfac2b78a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.362121] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1507.362121] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5210a710-919c-5582-fd83-8ec71190698e" [ 1507.362121] env[62508]: _type = "Task" [ 1507.362121] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.373146] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5210a710-919c-5582-fd83-8ec71190698e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.593240] env[62508]: DEBUG oslo_concurrency.lockutils [None req-76474de3-2f9c-4c8c-9c0f-80189635c0b7 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lock "63fca45d-5922-4a14-9936-30070c349f8e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.433s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.610023] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775898, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.739663] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3515c832-4c94-409d-b436-c11acc2c26c6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.751356] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b8d272-362b-426f-974c-69f8d0558ac0 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Suspending the VM {{(pid=62508) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1507.751669] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-150085cd-148a-4724-a25d-4fd121a08a46 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.761448] env[62508]: DEBUG oslo_vmware.api [None req-e5b8d272-362b-426f-974c-69f8d0558ac0 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1507.761448] env[62508]: value = "task-1775900" [ 1507.761448] env[62508]: _type = "Task" [ 1507.761448] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.777596] env[62508]: DEBUG oslo_vmware.api [None req-e5b8d272-362b-426f-974c-69f8d0558ac0 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775900, 'name': SuspendVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.827847] env[62508]: DEBUG oslo_vmware.api [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775899, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.848840] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "refresh_cache-e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1507.849038] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquired lock "refresh_cache-e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1507.849231] env[62508]: DEBUG nova.network.neutron [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1507.877999] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5210a710-919c-5582-fd83-8ec71190698e, 'name': SearchDatastore_Task, 'duration_secs': 0.012673} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.879217] env[62508]: DEBUG oslo_concurrency.lockutils [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1507.879217] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 38289797-ecf5-4207-a164-d70228e4411d/38289797-ecf5-4207-a164-d70228e4411d.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1507.879217] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-383304be-2d81-4608-91b2-959d895c3943 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.889158] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1507.889158] env[62508]: value = "task-1775901" [ 1507.889158] env[62508]: _type = "Task" [ 1507.889158] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.898931] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775901, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.109329] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775898, 'name': CreateVM_Task, 'duration_secs': 0.521592} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.114108] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1508.115355] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1508.115549] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1508.115902] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1508.116221] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c83926cb-77dd-482f-a680-21abf8dd3b9c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.124823] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1508.124823] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521b8dc2-e355-5bcd-e36e-9f74c33e582c" [ 1508.124823] env[62508]: _type = "Task" [ 1508.124823] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.146787] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521b8dc2-e355-5bcd-e36e-9f74c33e582c, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.148817] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1508.149108] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1508.149950] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1508.150080] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1508.150236] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1508.151299] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-497b86a2-45a8-4f9b-bf9f-528c6b6ea709 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.167504] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1508.167504] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1508.167504] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95d2295e-d1a6-447b-97f7-2e3673f7f0dc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.183502] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1508.183502] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f0b080-d927-90a7-d697-40f38c28fd6e" [ 1508.183502] env[62508]: _type = "Task" [ 1508.183502] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.199198] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "a226327d-11df-45e0-bef8-2337a0317c9e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1508.199346] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lock "a226327d-11df-45e0-bef8-2337a0317c9e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.199572] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "a226327d-11df-45e0-bef8-2337a0317c9e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1508.199803] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lock "a226327d-11df-45e0-bef8-2337a0317c9e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.199975] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lock "a226327d-11df-45e0-bef8-2337a0317c9e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.205033] env[62508]: INFO nova.compute.manager [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Terminating instance [ 1508.205586] env[62508]: DEBUG nova.compute.manager [None 
req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1508.205830] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1508.206285] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f0b080-d927-90a7-d697-40f38c28fd6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.211436] env[62508]: DEBUG nova.network.neutron [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Successfully updated port: 458dc468-1ae9-4f09-b0e2-4c866362fb80 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1508.213640] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-452feb62-67aa-48dd-97b2-0380f1eb9135 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.224952] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1508.225245] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f2a72ef-1252-4495-a996-474b50b41752 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.234806] env[62508]: DEBUG oslo_vmware.api [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1508.234806] env[62508]: value = "task-1775902" [ 1508.234806] env[62508]: _type = "Task" [ 1508.234806] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.254497] env[62508]: DEBUG oslo_vmware.api [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775902, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.279116] env[62508]: DEBUG oslo_vmware.api [None req-e5b8d272-362b-426f-974c-69f8d0558ac0 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775900, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.313474] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "cdb1ccaf-83b3-48f8-92da-aca2310863ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1508.313736] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "cdb1ccaf-83b3-48f8-92da-aca2310863ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.331927] env[62508]: DEBUG oslo_vmware.api [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775899, 'name': RemoveSnapshot_Task} progress is 30%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.402498] env[62508]: DEBUG nova.network.neutron [req-843a383a-cb71-4688-9966-6593aafd3611 req-36c2d500-44f3-4c6f-a50e-af4c81ea34c0 service nova] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Updated VIF entry in instance network info cache for port 179c7569-ad66-43e9-976d-5c0146e5f13f. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1508.402829] env[62508]: DEBUG nova.network.neutron [req-843a383a-cb71-4688-9966-6593aafd3611 req-36c2d500-44f3-4c6f-a50e-af4c81ea34c0 service nova] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Updating instance_info_cache with network_info: [{"id": "179c7569-ad66-43e9-976d-5c0146e5f13f", "address": "fa:16:3e:5a:bf:4f", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap179c7569-ad", "ovs_interfaceid": "179c7569-ad66-43e9-976d-5c0146e5f13f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1508.410918] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 
tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775901, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.638895] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f85e365-a19a-4ea8-819f-e2b1d7d52239 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.648515] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6ce482-f282-419b-bbbf-c2dfe2c144db {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.691333] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75218192-977c-498d-bc0b-3e05b867b43c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.706207] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f288d4b-f6d4-475d-8ec3-0a573bc3c81c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.710787] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f0b080-d927-90a7-d697-40f38c28fd6e, 'name': SearchDatastore_Task, 'duration_secs': 0.058816} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.712290] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ec45d7d-be5c-4d91-a7c8-524b474a696a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.725320] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1508.726121] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquired lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1508.726121] env[62508]: DEBUG nova.network.neutron [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1508.728110] env[62508]: DEBUG nova.compute.provider_tree [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 
{{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1508.737037] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1508.737037] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52624241-1d1e-2aaf-1d10-5cfaec102c98" [ 1508.737037] env[62508]: _type = "Task" [ 1508.737037] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.751023] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52624241-1d1e-2aaf-1d10-5cfaec102c98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.755662] env[62508]: DEBUG oslo_vmware.api [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775902, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.772787] env[62508]: DEBUG oslo_vmware.api [None req-e5b8d272-362b-426f-974c-69f8d0558ac0 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775900, 'name': SuspendVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.816974] env[62508]: DEBUG nova.compute.manager [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1508.829528] env[62508]: DEBUG oslo_vmware.api [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775899, 'name': RemoveSnapshot_Task, 'duration_secs': 1.298856} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.829792] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Deleted Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1508.830028] env[62508]: INFO nova.compute.manager [None req-e35703a6-fba5-4a27-9b08-fbd42b978d46 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Took 18.81 seconds to snapshot the instance on the hypervisor. 
[ 1508.850380] env[62508]: DEBUG nova.network.neutron [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Updating instance_info_cache with network_info: [{"id": "cafd5648-99e8-4c28-92bb-439b1d656b15", "address": "fa:16:3e:79:5e:f6", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcafd5648-99", "ovs_interfaceid": "cafd5648-99e8-4c28-92bb-439b1d656b15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1508.902995] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775901, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.680226} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.902995] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 38289797-ecf5-4207-a164-d70228e4411d/38289797-ecf5-4207-a164-d70228e4411d.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1508.903264] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1508.903571] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ee43f98-3d00-4860-a3df-ae6a7bef8234 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.906170] env[62508]: DEBUG oslo_concurrency.lockutils [req-843a383a-cb71-4688-9966-6593aafd3611 req-36c2d500-44f3-4c6f-a50e-af4c81ea34c0 service nova] Releasing lock "refresh_cache-f456dd83-6350-46b2-b06c-41dc5c477358" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1508.917298] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1508.917298] env[62508]: value = "task-1775903" [ 1508.917298] env[62508]: _type = "Task" [ 1508.917298] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.928051] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775903, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.231467] env[62508]: DEBUG nova.scheduler.client.report [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1509.255141] env[62508]: DEBUG oslo_vmware.api [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775902, 'name': PowerOffVM_Task, 'duration_secs': 0.735354} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.259521] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1509.259736] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1509.260166] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52624241-1d1e-2aaf-1d10-5cfaec102c98, 'name': SearchDatastore_Task, 'duration_secs': 0.036946} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.261052] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-68a3d448-e7ac-4747-8861-6b121c4671fd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.263094] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1509.263516] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] f456dd83-6350-46b2-b06c-41dc5c477358/f456dd83-6350-46b2-b06c-41dc5c477358.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1509.264448] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d299055-9ebd-44e1-b1c6-70002ba28cf5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.278777] env[62508]: DEBUG oslo_vmware.api [None req-e5b8d272-362b-426f-974c-69f8d0558ac0 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775900, 'name': SuspendVM_Task, 'duration_secs': 1.248234} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.281168] env[62508]: DEBUG nova.network.neutron [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1509.283366] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b8d272-362b-426f-974c-69f8d0558ac0 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Suspended the VM {{(pid=62508) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1509.283860] env[62508]: DEBUG nova.compute.manager [None req-e5b8d272-362b-426f-974c-69f8d0558ac0 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1509.284288] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1509.284288] env[62508]: value = "task-1775905" [ 1509.284288] env[62508]: _type = "Task" [ 1509.284288] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.285331] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2fad617-69b8-411e-b420-8e6c150941c4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.306756] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775905, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.340920] env[62508]: DEBUG nova.compute.manager [req-b5ffd6ad-1bb1-4e8f-a07e-97f97bcf8b24 req-4eec2920-a2ee-4482-8a98-1a5d50e9bb12 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Received event network-vif-plugged-458dc468-1ae9-4f09-b0e2-4c866362fb80 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1509.341236] env[62508]: DEBUG oslo_concurrency.lockutils [req-b5ffd6ad-1bb1-4e8f-a07e-97f97bcf8b24 req-4eec2920-a2ee-4482-8a98-1a5d50e9bb12 service nova] Acquiring lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.341457] env[62508]: DEBUG oslo_concurrency.lockutils [req-b5ffd6ad-1bb1-4e8f-a07e-97f97bcf8b24 req-4eec2920-a2ee-4482-8a98-1a5d50e9bb12 service nova] Lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.341639] env[62508]: DEBUG oslo_concurrency.lockutils [req-b5ffd6ad-1bb1-4e8f-a07e-97f97bcf8b24 req-4eec2920-a2ee-4482-8a98-1a5d50e9bb12 service nova] Lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1509.341781] env[62508]: DEBUG nova.compute.manager [req-b5ffd6ad-1bb1-4e8f-a07e-97f97bcf8b24 
req-4eec2920-a2ee-4482-8a98-1a5d50e9bb12 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] No waiting events found dispatching network-vif-plugged-458dc468-1ae9-4f09-b0e2-4c866362fb80 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1509.341937] env[62508]: WARNING nova.compute.manager [req-b5ffd6ad-1bb1-4e8f-a07e-97f97bcf8b24 req-4eec2920-a2ee-4482-8a98-1a5d50e9bb12 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Received unexpected event network-vif-plugged-458dc468-1ae9-4f09-b0e2-4c866362fb80 for instance with vm_state building and task_state spawning. [ 1509.342103] env[62508]: DEBUG nova.compute.manager [req-b5ffd6ad-1bb1-4e8f-a07e-97f97bcf8b24 req-4eec2920-a2ee-4482-8a98-1a5d50e9bb12 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Received event network-changed-458dc468-1ae9-4f09-b0e2-4c866362fb80 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1509.342330] env[62508]: DEBUG nova.compute.manager [req-b5ffd6ad-1bb1-4e8f-a07e-97f97bcf8b24 req-4eec2920-a2ee-4482-8a98-1a5d50e9bb12 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Refreshing instance network info cache due to event network-changed-458dc468-1ae9-4f09-b0e2-4c866362fb80. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1509.342506] env[62508]: DEBUG oslo_concurrency.lockutils [req-b5ffd6ad-1bb1-4e8f-a07e-97f97bcf8b24 req-4eec2920-a2ee-4482-8a98-1a5d50e9bb12 service nova] Acquiring lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1509.345720] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.351519] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Releasing lock "refresh_cache-e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1509.429136] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775903, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07232} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.429574] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1509.430740] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3e6652-517f-4bee-a571-0c4adad9922e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.441314] env[62508]: DEBUG nova.network.neutron [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Updating instance_info_cache with network_info: [{"id": "458dc468-1ae9-4f09-b0e2-4c866362fb80", "address": "fa:16:3e:f7:69:71", "network": {"id": "1469693f-972e-4bc4-8302-f159fa7e79b8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1766475744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27e6f55b56be40d2a619f0119aefb2ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap458dc468-1a", "ovs_interfaceid": "458dc468-1ae9-4f09-b0e2-4c866362fb80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1509.466592] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 38289797-ecf5-4207-a164-d70228e4411d/38289797-ecf5-4207-a164-d70228e4411d.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1509.468262] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Releasing lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1509.468654] env[62508]: DEBUG nova.compute.manager [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Instance network_info: |[{"id": 
"458dc468-1ae9-4f09-b0e2-4c866362fb80", "address": "fa:16:3e:f7:69:71", "network": {"id": "1469693f-972e-4bc4-8302-f159fa7e79b8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1766475744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27e6f55b56be40d2a619f0119aefb2ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap458dc468-1a", "ovs_interfaceid": "458dc468-1ae9-4f09-b0e2-4c866362fb80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1509.469040] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfefbe88-762a-4641-81c8-4f3280f9abe7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.487614] env[62508]: DEBUG oslo_concurrency.lockutils [req-b5ffd6ad-1bb1-4e8f-a07e-97f97bcf8b24 req-4eec2920-a2ee-4482-8a98-1a5d50e9bb12 service nova] Acquired lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1509.487794] env[62508]: DEBUG nova.network.neutron [req-b5ffd6ad-1bb1-4e8f-a07e-97f97bcf8b24 req-4eec2920-a2ee-4482-8a98-1a5d50e9bb12 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Refreshing network info cache for port 458dc468-1ae9-4f09-b0e2-4c866362fb80 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1509.489735] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:69:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40c947c4-f471-4d48-8e43-fee54198107e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '458dc468-1ae9-4f09-b0e2-4c866362fb80', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1509.497761] env[62508]: DEBUG oslo.service.loopingcall [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1509.500328] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1509.500935] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1509.501218] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1509.501431] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Deleting the datastore file [datastore1] a226327d-11df-45e0-bef8-2337a0317c9e {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1509.502550] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-566425e1-227d-4ce4-83d9-717dafa0db5c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.518658] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9755c41b-3d50-48c0-ab35-b0ace409ef8d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.525400] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1509.525400] env[62508]: value = "task-1775906" [ 1509.525400] env[62508]: _type = "Task" [ 1509.525400] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.529989] env[62508]: DEBUG oslo_vmware.api [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for the task: (returnval){ [ 1509.529989] env[62508]: value = "task-1775907" [ 1509.529989] env[62508]: _type = "Task" [ 1509.529989] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.531501] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1509.531501] env[62508]: value = "task-1775908" [ 1509.531501] env[62508]: _type = "Task" [ 1509.531501] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.538997] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775906, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.549397] env[62508]: DEBUG oslo_vmware.api [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775907, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.553274] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775908, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.739382] env[62508]: DEBUG oslo_concurrency.lockutils [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.697s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1509.739953] env[62508]: DEBUG nova.compute.manager [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1509.742993] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.981s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.743161] env[62508]: DEBUG nova.objects.instance [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lazy-loading 'resources' on Instance uuid 879f1e09-8b21-4f89-bc00-04e3d6710662 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1509.803204] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775905, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.876025] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8fef72e-25f7-47dc-9658-03814f099ba6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.902501] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e2a8f0-cf81-4f78-8765-7cf719254515 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.911285] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Updating instance 'e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6' progress to 83 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1510.036554] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775906, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.050639] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775908, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.054574] env[62508]: DEBUG oslo_vmware.api [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Task: {'id': task-1775907, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.497419} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.054829] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1510.055016] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1510.055208] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1510.055378] env[62508]: INFO nova.compute.manager [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Took 1.85 seconds to destroy the instance on the hypervisor. 
[ 1510.055691] env[62508]: DEBUG oslo.service.loopingcall [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1510.058670] env[62508]: DEBUG nova.compute.manager [-] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1510.058845] env[62508]: DEBUG nova.network.neutron [-] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1510.248752] env[62508]: DEBUG nova.compute.utils [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1510.254018] env[62508]: DEBUG nova.compute.manager [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1510.254440] env[62508]: DEBUG nova.network.neutron [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1510.300957] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775905, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.55823} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.303958] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] f456dd83-6350-46b2-b06c-41dc5c477358/f456dd83-6350-46b2-b06c-41dc5c477358.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1510.304425] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1510.304980] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8970320b-12ee-4cdb-ae91-ba2ec8659a1d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.314255] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1510.314255] env[62508]: value = "task-1775909" [ 1510.314255] env[62508]: _type = "Task" [ 1510.314255] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.326499] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775909, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.346820] env[62508]: DEBUG nova.policy [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cb7bd4b3c5d844149357f18dda723562', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b9b1180071bc4cc2a419daac2f51e3f1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1510.419912] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1510.419912] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-124ee8b9-a432-4600-83a9-3aea6dcb1711 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.430674] env[62508]: DEBUG oslo_vmware.api [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1510.430674] env[62508]: value = "task-1775910" [ 1510.430674] env[62508]: _type = "Task" [ 1510.430674] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.442136] env[62508]: DEBUG nova.network.neutron [req-b5ffd6ad-1bb1-4e8f-a07e-97f97bcf8b24 req-4eec2920-a2ee-4482-8a98-1a5d50e9bb12 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Updated VIF entry in instance network info cache for port 458dc468-1ae9-4f09-b0e2-4c866362fb80. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1510.442513] env[62508]: DEBUG nova.network.neutron [req-b5ffd6ad-1bb1-4e8f-a07e-97f97bcf8b24 req-4eec2920-a2ee-4482-8a98-1a5d50e9bb12 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Updating instance_info_cache with network_info: [{"id": "458dc468-1ae9-4f09-b0e2-4c866362fb80", "address": "fa:16:3e:f7:69:71", "network": {"id": "1469693f-972e-4bc4-8302-f159fa7e79b8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1766475744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27e6f55b56be40d2a619f0119aefb2ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap458dc468-1a", "ovs_interfaceid": "458dc468-1ae9-4f09-b0e2-4c866362fb80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1510.443766] env[62508]: DEBUG oslo_vmware.api [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775910, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.522319] env[62508]: DEBUG nova.compute.manager [req-8b2bb5be-2f6c-48f8-a756-ab3eea4e491e req-c5320f80-1675-453d-8d2b-d8325de1bf3a service nova] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Received event network-vif-deleted-bcf067d3-85ff-44e6-bb5d-153c8b425360 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1510.522527] env[62508]: INFO nova.compute.manager [req-8b2bb5be-2f6c-48f8-a756-ab3eea4e491e req-c5320f80-1675-453d-8d2b-d8325de1bf3a service nova] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Neutron deleted interface bcf067d3-85ff-44e6-bb5d-153c8b425360; detaching it from the instance and deleting it from the info cache [ 1510.522722] env[62508]: DEBUG nova.network.neutron [req-8b2bb5be-2f6c-48f8-a756-ab3eea4e491e req-c5320f80-1675-453d-8d2b-d8325de1bf3a service nova] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1510.537301] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775906, 'name': ReconfigVM_Task, 'duration_secs': 0.660509} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.540949] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 38289797-ecf5-4207-a164-d70228e4411d/38289797-ecf5-4207-a164-d70228e4411d.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1510.542091] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b7adbbb2-2025-414e-939b-52b9e074d3f1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.554518] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775908, 'name': CreateVM_Task, 'duration_secs': 0.60866} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.558522] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1510.558522] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1510.558522] env[62508]: value = "task-1775911" [ 1510.558522] env[62508]: _type = "Task" [ 1510.558522] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.560258] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1510.560412] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.560731] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1510.561055] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-944ac3af-17d6-4f42-9896-45731091e537 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.578089] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 
tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775911, 'name': Rename_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.578089] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1510.578089] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a27cb5-8bd8-a6c8-1d88-a4e60df874d3" [ 1510.578089] env[62508]: _type = "Task" [ 1510.578089] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.586450] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a27cb5-8bd8-a6c8-1d88-a4e60df874d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.633324] env[62508]: DEBUG nova.compute.manager [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1510.634665] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad116c7-1831-4f01-a647-369225846be4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.693980] env[62508]: DEBUG nova.network.neutron [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Successfully created port: 9b029442-1a65-4a97-ac36-d00548fe1f7d {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1510.756692] env[62508]: DEBUG nova.compute.manager [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1510.829773] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775909, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064014} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.830545] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1510.831983] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372f35f5-fd7d-4d65-b554-77d7694c4316 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.862296] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] f456dd83-6350-46b2-b06c-41dc5c477358/f456dd83-6350-46b2-b06c-41dc5c477358.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1510.863024] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17b133c2-f9f0-4cc4-9b44-851396ae6db2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.893344] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1510.893344] env[62508]: value = "task-1775912" [ 1510.893344] env[62508]: _type = "Task" [ 1510.893344] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.898353] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148b79bb-42fc-4cfc-91f4-411aae62eba4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.904285] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775912, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.909608] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c679c72-4181-46b0-bee4-5bb05af2dc80 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.949888] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ce5c454-e0da-454e-9caf-52552279455d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.953554] env[62508]: DEBUG oslo_concurrency.lockutils [req-b5ffd6ad-1bb1-4e8f-a07e-97f97bcf8b24 req-4eec2920-a2ee-4482-8a98-1a5d50e9bb12 service nova] Releasing lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1510.960618] env[62508]: DEBUG oslo_vmware.api [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775910, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.964052] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e11bf9-408f-4f8a-9671-5b237390612f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.969202] env[62508]: DEBUG nova.network.neutron [-] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1510.980150] env[62508]: DEBUG nova.compute.provider_tree [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1511.025153] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0535d0a5-5bc6-4da5-915e-ca2be2c474d7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.037638] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf39f599-2beb-4128-a7f8-a62a6073e018 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.081565] env[62508]: DEBUG nova.compute.manager [req-8b2bb5be-2f6c-48f8-a756-ab3eea4e491e req-c5320f80-1675-453d-8d2b-d8325de1bf3a service nova] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Detach interface failed, port_id=bcf067d3-85ff-44e6-bb5d-153c8b425360, reason: Instance a226327d-11df-45e0-bef8-2337a0317c9e could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1511.091044] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775911, 'name': Rename_Task, 'duration_secs': 0.21854} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.091044] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1511.091044] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb76f05f-0052-42ba-978f-914966db366b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.095567] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a27cb5-8bd8-a6c8-1d88-a4e60df874d3, 'name': SearchDatastore_Task, 'duration_secs': 0.022439} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.096289] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1511.096520] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1511.096751] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1511.096890] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1511.097079] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1511.097361] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2f88ac5-ed24-4ad1-be62-a65517cc8709 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.104877] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1511.104877] env[62508]: value = "task-1775913" [ 1511.104877] env[62508]: _type = "Task" [ 1511.104877] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.109077] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1511.112175] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1511.112175] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80d7bdb3-2490-4dd1-94ae-fa6f369c7487 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.116556] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775913, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.117962] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1511.117962] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521b6f52-fb2d-a3ca-f805-8d22595463fe" [ 1511.117962] env[62508]: _type = "Task" [ 1511.117962] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.126839] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521b6f52-fb2d-a3ca-f805-8d22595463fe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.150347] env[62508]: INFO nova.compute.manager [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] instance snapshotting [ 1511.153316] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8069268d-1368-437f-a1cb-526bfcedfc74 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.174912] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e90803-6704-4c87-b888-ddb3056945c7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.403267] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775912, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.455985] env[62508]: DEBUG oslo_vmware.api [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1775910, 'name': PowerOnVM_Task, 'duration_secs': 0.70486} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.456084] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1511.456299] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-95558e40-8da8-413a-80ca-3e20b1e6ecac tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Updating instance 'e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6' progress to 100 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1511.469759] env[62508]: INFO nova.compute.manager [-] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Took 1.41 seconds to deallocate network for instance. 
[ 1511.483603] env[62508]: DEBUG nova.scheduler.client.report [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1511.615743] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775913, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.628551] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521b6f52-fb2d-a3ca-f805-8d22595463fe, 'name': SearchDatastore_Task, 'duration_secs': 0.023385} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.629622] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f70b5ff-ff08-40b9-82be-5adeb139e8dd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.636341] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1511.636341] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52729541-7f04-6e2f-2cb3-47a31dcdf136" [ 1511.636341] env[62508]: _type = "Task" [ 1511.636341] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.644742] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52729541-7f04-6e2f-2cb3-47a31dcdf136, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.688320] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Creating Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1511.688680] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-fb0e444e-bbe4-437e-858b-938dfba34825 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.697615] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1511.697615] env[62508]: value = "task-1775914" [ 1511.697615] env[62508]: _type = "Task" [ 1511.697615] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.709163] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775914, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.765648] env[62508]: DEBUG nova.compute.manager [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1511.798262] env[62508]: DEBUG nova.virt.hardware [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1511.798506] env[62508]: DEBUG nova.virt.hardware [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1511.799568] env[62508]: DEBUG nova.virt.hardware [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1511.799568] env[62508]: DEBUG nova.virt.hardware [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1511.799568] env[62508]: DEBUG nova.virt.hardware [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1511.799568] env[62508]: DEBUG nova.virt.hardware [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1511.799568] env[62508]: DEBUG nova.virt.hardware [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1511.799775] env[62508]: DEBUG nova.virt.hardware [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1511.799775] env[62508]: DEBUG 
nova.virt.hardware [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1511.799938] env[62508]: DEBUG nova.virt.hardware [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1511.800673] env[62508]: DEBUG nova.virt.hardware [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1511.801036] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6da9c21-809b-4fa5-84f9-f77ed38d0d5f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.809957] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d676090-7df4-4bf0-94f4-3975f171be2c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.888896] env[62508]: DEBUG nova.compute.manager [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1511.889848] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae5ca5c5-b669-4f0b-8d42-24ac41c62beb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.907201] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775912, 'name': ReconfigVM_Task, 'duration_secs': 0.719321} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.907201] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Reconfigured VM instance instance-00000037 to attach disk [datastore1] f456dd83-6350-46b2-b06c-41dc5c477358/f456dd83-6350-46b2-b06c-41dc5c477358.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1511.907567] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2ca864a9-2d71-4be5-ac86-f5df05a6c5e6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.915649] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1511.915649] env[62508]: value = "task-1775915" [ 1511.915649] env[62508]: _type = "Task" [ 1511.915649] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.930888] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775915, 'name': Rename_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.975937] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.989707] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.247s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.993065] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.117s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1511.994774] env[62508]: INFO nova.compute.claims [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1512.024095] env[62508]: INFO nova.scheduler.client.report [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Deleted allocations for instance 
879f1e09-8b21-4f89-bc00-04e3d6710662 [ 1512.118105] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775913, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.151974] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52729541-7f04-6e2f-2cb3-47a31dcdf136, 'name': SearchDatastore_Task, 'duration_secs': 0.010171} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.152129] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1512.152712] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a/aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1512.152932] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3bbcd7b0-9397-43e6-af4c-2fc4a23ed9e9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.162811] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1512.162811] env[62508]: value = "task-1775916" [ 1512.162811] env[62508]: _type = "Task" [ 1512.162811] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.181020] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775916, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.211298] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775914, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.354971] env[62508]: DEBUG nova.network.neutron [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Successfully updated port: 9b029442-1a65-4a97-ac36-d00548fe1f7d {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1512.403740] env[62508]: INFO nova.compute.manager [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] instance snapshotting [ 1512.404263] env[62508]: WARNING nova.compute.manager [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 1512.407937] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b0d693d-0257-45be-b4f6-c52a8d19b891 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.436934] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04bc3db1-60ba-45e6-af9a-75141cc9d16a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.449753] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775915, 'name': Rename_Task, 'duration_secs': 0.195052} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.451867] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1512.451867] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56d836e6-2ec6-486e-9234-ab1e3400d645 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.457853] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1512.457853] env[62508]: value = "task-1775917" [ 1512.457853] env[62508]: _type = "Task" [ 1512.457853] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.476450] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775917, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.538027] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5c0c06cc-bb07-4f89-ba51-5641c4b4b9f6 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "879f1e09-8b21-4f89-bc00-04e3d6710662" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 29.765s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.552824] env[62508]: DEBUG nova.compute.manager [req-5267eff5-9dab-4ca2-a55a-4addeb0437ae req-fb20a1d0-2656-483c-8a69-a0fac9ed24a6 service nova] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Received event network-vif-plugged-9b029442-1a65-4a97-ac36-d00548fe1f7d {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1512.553040] env[62508]: DEBUG oslo_concurrency.lockutils [req-5267eff5-9dab-4ca2-a55a-4addeb0437ae req-fb20a1d0-2656-483c-8a69-a0fac9ed24a6 service nova] Acquiring lock "6ae078f6-3b96-4b49-b282-cae74d742c97-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1512.553341] env[62508]: DEBUG oslo_concurrency.lockutils [req-5267eff5-9dab-4ca2-a55a-4addeb0437ae req-fb20a1d0-2656-483c-8a69-a0fac9ed24a6 service nova] Lock "6ae078f6-3b96-4b49-b282-cae74d742c97-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1512.553525] env[62508]: DEBUG oslo_concurrency.lockutils [req-5267eff5-9dab-4ca2-a55a-4addeb0437ae req-fb20a1d0-2656-483c-8a69-a0fac9ed24a6 service nova] Lock "6ae078f6-3b96-4b49-b282-cae74d742c97-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.553696] env[62508]: DEBUG nova.compute.manager [req-5267eff5-9dab-4ca2-a55a-4addeb0437ae req-fb20a1d0-2656-483c-8a69-a0fac9ed24a6 service nova] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] No waiting events found dispatching network-vif-plugged-9b029442-1a65-4a97-ac36-d00548fe1f7d {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1512.553863] env[62508]: WARNING nova.compute.manager [req-5267eff5-9dab-4ca2-a55a-4addeb0437ae req-fb20a1d0-2656-483c-8a69-a0fac9ed24a6 service nova] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Received unexpected event network-vif-plugged-9b029442-1a65-4a97-ac36-d00548fe1f7d for instance with vm_state building and task_state spawning. [ 1512.554036] env[62508]: DEBUG nova.compute.manager [req-5267eff5-9dab-4ca2-a55a-4addeb0437ae req-fb20a1d0-2656-483c-8a69-a0fac9ed24a6 service nova] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Received event network-changed-9b029442-1a65-4a97-ac36-d00548fe1f7d {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1512.554198] env[62508]: DEBUG nova.compute.manager [req-5267eff5-9dab-4ca2-a55a-4addeb0437ae req-fb20a1d0-2656-483c-8a69-a0fac9ed24a6 service nova] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Refreshing instance network info cache due to event network-changed-9b029442-1a65-4a97-ac36-d00548fe1f7d. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1512.554411] env[62508]: DEBUG oslo_concurrency.lockutils [req-5267eff5-9dab-4ca2-a55a-4addeb0437ae req-fb20a1d0-2656-483c-8a69-a0fac9ed24a6 service nova] Acquiring lock "refresh_cache-6ae078f6-3b96-4b49-b282-cae74d742c97" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1512.554555] env[62508]: DEBUG oslo_concurrency.lockutils [req-5267eff5-9dab-4ca2-a55a-4addeb0437ae req-fb20a1d0-2656-483c-8a69-a0fac9ed24a6 service nova] Acquired lock "refresh_cache-6ae078f6-3b96-4b49-b282-cae74d742c97" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1512.554828] env[62508]: DEBUG nova.network.neutron [req-5267eff5-9dab-4ca2-a55a-4addeb0437ae req-fb20a1d0-2656-483c-8a69-a0fac9ed24a6 service nova] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Refreshing network info cache for port 9b029442-1a65-4a97-ac36-d00548fe1f7d {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1512.617667] env[62508]: DEBUG oslo_vmware.api [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775913, 'name': PowerOnVM_Task, 'duration_secs': 1.023949} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.618724] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1512.619047] env[62508]: INFO nova.compute.manager [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Took 10.97 seconds to spawn the instance on the hypervisor. [ 1512.619243] env[62508]: DEBUG nova.compute.manager [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1512.620128] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9942257-5b82-4a2d-80a9-1e49f52db390 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.678126] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775916, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.715206] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775914, 'name': CreateSnapshot_Task, 'duration_secs': 0.513641} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.715557] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Created Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1512.716405] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c14181-a5b4-4c64-86a1-c4b8d9152a59 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.857082] env[62508]: DEBUG oslo_concurrency.lockutils [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "refresh_cache-6ae078f6-3b96-4b49-b282-cae74d742c97" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1512.955240] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Creating Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1512.955561] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-00ae62a1-5146-40f5-94b2-a44f3fe998d0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.970776] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775917, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.976024] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1512.976024] env[62508]: value = "task-1775918" [ 1512.976024] env[62508]: _type = "Task" [ 1512.976024] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.994469] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775918, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.109934] env[62508]: DEBUG nova.network.neutron [req-5267eff5-9dab-4ca2-a55a-4addeb0437ae req-fb20a1d0-2656-483c-8a69-a0fac9ed24a6 service nova] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1513.142922] env[62508]: INFO nova.compute.manager [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Took 43.35 seconds to build instance. 
[ 1513.180567] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775916, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.699942} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.184293] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a/aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1513.184986] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1513.185230] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-051f481e-7cef-409d-bf26-56a8f2345298 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.196600] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1513.196600] env[62508]: value = "task-1775919" [ 1513.196600] env[62508]: _type = "Task" [ 1513.196600] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.211145] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775919, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.246422] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Creating linked-clone VM from snapshot {{(pid=62508) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1513.246815] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3c88293d-3341-4bcd-b4fd-8f410bd294f2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.253874] env[62508]: DEBUG nova.network.neutron [req-5267eff5-9dab-4ca2-a55a-4addeb0437ae req-fb20a1d0-2656-483c-8a69-a0fac9ed24a6 service nova] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1513.264724] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1513.264724] env[62508]: value = "task-1775920" [ 1513.264724] env[62508]: _type = "Task" [ 1513.264724] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.276086] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775920, 'name': CloneVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.468481] env[62508]: DEBUG oslo_vmware.api [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775917, 'name': PowerOnVM_Task, 'duration_secs': 0.859233} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.468754] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1513.468960] env[62508]: INFO nova.compute.manager [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Took 9.23 seconds to spawn the instance on the hypervisor. 
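The CopyVirtualDisk_Task, PowerOnVM_Task, CreateSnapshot_Task and ExtendVirtualDisk_Task records above all follow the same lifecycle: the caller waits for the task, periodically logs "progress is N%", and finally logs that the task completed with a duration. A minimal, generic poll loop illustrating that lifecycle follows; wait_for_task and fake_power_on_task here are hypothetical stand-ins for the sketch, not oslo.vmware's implementation.

import time

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a vSphere-style task until it reaches a terminal state,
    mirroring the 'progress is N%' / 'completed successfully' records."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info["state"] == "success":
            print("Task %s completed successfully in %.3fs"
                  % (info["id"], info["duration_secs"]))
            return info
        if info["state"] == "error":
            raise RuntimeError("Task %s failed: %s" % (info["id"], info["error"]))
        print("Task %s (%s) progress is %d%%"
              % (info["id"], info["name"], info["progress"]))
        time.sleep(poll_interval)
    raise TimeoutError("task did not finish before the deadline")

# A fake task that finishes on the third poll, for demonstration only.
_polls = {"count": 0}
def fake_power_on_task():
    _polls["count"] += 1
    done = _polls["count"] >= 3
    return {"id": "task-0000", "name": "PowerOnVM_Task",
            "progress": 100 if done else 33 * _polls["count"],
            "state": "success" if done else "running",
            "duration_secs": 1.024, "error": None}

wait_for_task(fake_power_on_task, poll_interval=0.01)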
[ 1513.469155] env[62508]: DEBUG nova.compute.manager [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1513.473463] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d37ddbe-9ddd-4770-b6cf-078d5bccb115 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.492213] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775918, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.635315] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00e9c12-3910-461a-933e-35ac2e2932e1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.644991] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a89742-1ac9-4b30-9590-c9f44189e352 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.648299] env[62508]: DEBUG oslo_concurrency.lockutils [None req-533cd7be-6050-4fb4-97fa-1f7e514cef94 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "38289797-ecf5-4207-a164-d70228e4411d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.791s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1513.681886] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa7da5c-8b2a-4e7e-976d-68dfb2a7f7d8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.690623] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09270fa8-40d2-4aab-8ae1-5195a0697a68 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.709237] env[62508]: DEBUG nova.compute.provider_tree [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1513.719821] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775919, 'name': ExtendVirtualDisk_Task, 
'duration_secs': 0.081442} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.720739] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1513.721645] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5cf756f-7d39-4e2f-a479-78724f1f534b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.745786] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a/aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1513.746369] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1714691-b93e-4cd3-9e0f-bb5844a4f651 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.761291] env[62508]: DEBUG oslo_concurrency.lockutils [req-5267eff5-9dab-4ca2-a55a-4addeb0437ae req-fb20a1d0-2656-483c-8a69-a0fac9ed24a6 service nova] Releasing lock "refresh_cache-6ae078f6-3b96-4b49-b282-cae74d742c97" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1513.761669] env[62508]: DEBUG oslo_concurrency.lockutils [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquired lock "refresh_cache-6ae078f6-3b96-4b49-b282-cae74d742c97" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1513.761842] env[62508]: DEBUG nova.network.neutron [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1513.771802] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1513.771802] env[62508]: value = "task-1775921" [ 1513.771802] env[62508]: _type = "Task" [ 1513.771802] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.778901] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775920, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.784740] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775921, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.991263] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775918, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.006254] env[62508]: INFO nova.compute.manager [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Took 42.53 seconds to build instance. [ 1514.082687] env[62508]: DEBUG oslo_concurrency.lockutils [None req-82474341-d7ee-44c2-a6e0-4c63c53d84aa tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "38289797-ecf5-4207-a164-d70228e4411d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.082971] env[62508]: DEBUG oslo_concurrency.lockutils [None req-82474341-d7ee-44c2-a6e0-4c63c53d84aa tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "38289797-ecf5-4207-a164-d70228e4411d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.083259] env[62508]: DEBUG nova.compute.manager [None req-82474341-d7ee-44c2-a6e0-4c63c53d84aa tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1514.084205] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4692a1c-bcf6-46fc-8e36-ace48c2e41f5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.095101] env[62508]: DEBUG nova.compute.manager [None req-82474341-d7ee-44c2-a6e0-4c63c53d84aa tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62508) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1514.095101] env[62508]: DEBUG nova.objects.instance [None req-82474341-d7ee-44c2-a6e0-4c63c53d84aa tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lazy-loading 'flavor' on Instance uuid 38289797-ecf5-4207-a164-d70228e4411d {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1514.239312] env[62508]: ERROR nova.scheduler.client.report [None 
req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [req-ce409f62-a4ed-4882-a9ee-58b665dc8d90] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ce409f62-a4ed-4882-a9ee-58b665dc8d90"}]} [ 1514.259213] env[62508]: DEBUG nova.scheduler.client.report [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1514.280343] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775920, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.282100] env[62508]: DEBUG nova.scheduler.client.report [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1514.283421] env[62508]: DEBUG nova.compute.provider_tree [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1514.294769] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775921, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.297882] env[62508]: DEBUG nova.scheduler.client.report [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1514.323773] env[62508]: DEBUG nova.network.neutron [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1514.326327] env[62508]: DEBUG nova.scheduler.client.report [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1514.499104] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775918, 'name': CreateSnapshot_Task, 'duration_secs': 1.218571} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.499310] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Created Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1514.500201] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c7e7f5d-f00b-4c97-99de-15c7d1c45a1e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.515779] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1c4e309b-a3e5-41af-bfed-6bfecf207e75 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "f456dd83-6350-46b2-b06c-41dc5c477358" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.023s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.533630] env[62508]: DEBUG nova.network.neutron [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Updating instance_info_cache with network_info: [{"id": "9b029442-1a65-4a97-ac36-d00548fe1f7d", "address": "fa:16:3e:98:40:cd", "network": {"id": "c8fc62e6-749b-4f96-8d05-8664390ef76f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1342046586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9b1180071bc4cc2a419daac2f51e3f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d829efb7-e98e-4b67-bd03-b0888287dbfd", "external-id": "nsx-vlan-transportzone-128", "segmentation_id": 128, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b029442-1a", "ovs_interfaceid": "9b029442-1a65-4a97-ac36-d00548fe1f7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1514.601069] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-82474341-d7ee-44c2-a6e0-4c63c53d84aa tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1514.601369] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65f72271-3157-486c-b91a-58655fc0bca5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.613968] env[62508]: DEBUG oslo_vmware.api [None req-82474341-d7ee-44c2-a6e0-4c63c53d84aa tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1514.613968] env[62508]: value = "task-1775922" [ 1514.613968] env[62508]: _type = "Task" [ 1514.613968] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.633707] env[62508]: DEBUG oslo_vmware.api [None req-82474341-d7ee-44c2-a6e0-4c63c53d84aa tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775922, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.659186] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquiring lock "deee2c81-4d2c-47d3-aae6-ef829d59c644" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.659496] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "deee2c81-4d2c-47d3-aae6-ef829d59c644" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.669118] env[62508]: DEBUG nova.network.neutron [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Port cafd5648-99e8-4c28-92bb-439b1d656b15 binding to destination host cpu-1 is already ACTIVE {{(pid=62508) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1514.669860] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "refresh_cache-e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1514.669933] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquired lock "refresh_cache-e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.670164] env[62508]: DEBUG nova.network.neutron [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1514.781954] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775920, 'name': CloneVM_Task} progress is 95%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.787913] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775921, 'name': ReconfigVM_Task, 'duration_secs': 0.592023} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.789043] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Reconfigured VM instance instance-00000038 to attach disk [datastore1] aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a/aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1514.790145] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-424d0a8f-2731-4a0b-aa67-b93d840bbd1d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.792812] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4413335a-310a-4838-bee3-4cf6a37b71f4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.801703] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e85dd9f4-f63e-4909-8183-dd1515daf701 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.805072] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1514.805072] env[62508]: value = "task-1775923" [ 1514.805072] env[62508]: _type = "Task" [ 1514.805072] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.839990] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df78ce9-7ff4-4855-bb68-2951048dc7e1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.843382] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775923, 'name': Rename_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.849402] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4213ebc-f536-4b5b-93b7-9c7c8aead902 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.864195] env[62508]: DEBUG nova.compute.provider_tree [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1515.030381] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Creating linked-clone VM from snapshot {{(pid=62508) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1515.031280] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-cb964c12-6f65-4e33-a047-d3b03ab76b6c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.036329] env[62508]: DEBUG oslo_concurrency.lockutils [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Releasing lock "refresh_cache-6ae078f6-3b96-4b49-b282-cae74d742c97" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1515.036627] env[62508]: DEBUG nova.compute.manager [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Instance network_info: |[{"id": "9b029442-1a65-4a97-ac36-d00548fe1f7d", "address": "fa:16:3e:98:40:cd", "network": {"id": "c8fc62e6-749b-4f96-8d05-8664390ef76f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1342046586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9b1180071bc4cc2a419daac2f51e3f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d829efb7-e98e-4b67-bd03-b0888287dbfd", "external-id": "nsx-vlan-transportzone-128", "segmentation_id": 128, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b029442-1a", "ovs_interfaceid": "9b029442-1a65-4a97-ac36-d00548fe1f7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1515.036999] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:40:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd829efb7-e98e-4b67-bd03-b0888287dbfd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9b029442-1a65-4a97-ac36-d00548fe1f7d', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1515.044694] env[62508]: DEBUG oslo.service.loopingcall [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1515.046433] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1515.046764] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1515.046764] env[62508]: value = "task-1775924" [ 1515.046764] env[62508]: _type = "Task" [ 1515.046764] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.046951] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-158d7b65-537e-476d-b844-9f60c0b4f21c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.071997] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775924, 'name': CloneVM_Task} progress is 11%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.073282] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1515.073282] env[62508]: value = "task-1775925" [ 1515.073282] env[62508]: _type = "Task" [ 1515.073282] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.085360] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775925, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.129099] env[62508]: DEBUG oslo_vmware.api [None req-82474341-d7ee-44c2-a6e0-4c63c53d84aa tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775922, 'name': PowerOffVM_Task, 'duration_secs': 0.422715} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.129457] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-82474341-d7ee-44c2-a6e0-4c63c53d84aa tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1515.129659] env[62508]: DEBUG nova.compute.manager [None req-82474341-d7ee-44c2-a6e0-4c63c53d84aa tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1515.130607] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d6da55-1a58-4d6b-9541-a7ba1202d4b4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.163184] env[62508]: DEBUG nova.compute.manager [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1515.279524] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775920, 'name': CloneVM_Task} progress is 95%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.314753] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775923, 'name': Rename_Task, 'duration_secs': 0.153361} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.318551] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1515.318968] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-863bcd71-4793-4c2e-a7f7-c137134721d1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.327349] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1515.327349] env[62508]: value = "task-1775926" [ 1515.327349] env[62508]: _type = "Task" [ 1515.327349] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.336225] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775926, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.394689] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquiring lock "84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1515.394987] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1515.395217] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquiring lock "84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1515.395403] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1515.395619] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.398054] env[62508]: INFO nova.compute.manager [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Terminating instance [ 1515.400102] env[62508]: DEBUG nova.compute.manager [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1515.400365] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1515.401294] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a477da18-eee1-4769-a9df-dd9af8ed5cab {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.405253] env[62508]: DEBUG nova.scheduler.client.report [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 83 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1515.405531] env[62508]: DEBUG nova.compute.provider_tree [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 83 to 84 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1515.405749] env[62508]: DEBUG nova.compute.provider_tree [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1515.416684] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1515.416982] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db8bd5af-c9c2-452f-acaf-78d840537d4b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.425208] env[62508]: DEBUG oslo_vmware.api [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1515.425208] env[62508]: value = "task-1775927" 
[ 1515.425208] env[62508]: _type = "Task" [ 1515.425208] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.435318] env[62508]: DEBUG oslo_vmware.api [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775927, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.451024] env[62508]: DEBUG nova.network.neutron [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Updating instance_info_cache with network_info: [{"id": "cafd5648-99e8-4c28-92bb-439b1d656b15", "address": "fa:16:3e:79:5e:f6", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcafd5648-99", "ovs_interfaceid": "cafd5648-99e8-4c28-92bb-439b1d656b15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1515.513682] env[62508]: INFO nova.compute.manager [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Rebuilding instance [ 1515.561027] env[62508]: DEBUG nova.compute.manager [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1515.561652] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5bf658-ac36-4cd6-b168-472be238aa0a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.575678] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775924, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.589242] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775925, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.644404] env[62508]: DEBUG oslo_concurrency.lockutils [None req-82474341-d7ee-44c2-a6e0-4c63c53d84aa tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "38289797-ecf5-4207-a164-d70228e4411d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.561s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.682100] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1515.777668] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775920, 'name': CloneVM_Task} progress is 95%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.839172] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775926, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.913431] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.921s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.914046] env[62508]: DEBUG nova.compute.manager [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1515.917082] env[62508]: DEBUG oslo_concurrency.lockutils [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.330s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1515.917298] env[62508]: DEBUG nova.objects.instance [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Lazy-loading 'resources' on Instance uuid 2b166aa9-9381-42c0-a607-7d610f08a4e3 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1515.936921] env[62508]: DEBUG oslo_vmware.api [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775927, 'name': PowerOffVM_Task, 'duration_secs': 0.223251} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.937192] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1515.937381] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1515.937644] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6659b738-c12b-4860-a5e4-014957f03ff5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.954496] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Releasing lock "refresh_cache-e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1516.076747] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775924, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.087226] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1516.087226] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775925, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.087367] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-16ad57e4-2b19-4fa2-a291-f3ab4d1322a8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.095148] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1516.095148] env[62508]: value = "task-1775929" [ 1516.095148] env[62508]: _type = "Task" [ 1516.095148] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.104502] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775929, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.277748] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775920, 'name': CloneVM_Task} progress is 95%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.340252] env[62508]: DEBUG oslo_vmware.api [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1775926, 'name': PowerOnVM_Task, 'duration_secs': 0.795126} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.340522] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1516.340732] env[62508]: INFO nova.compute.manager [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Took 9.35 seconds to spawn the instance on the hypervisor. 
[ 1516.340913] env[62508]: DEBUG nova.compute.manager [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1516.341718] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ed8a30-f612-403b-982d-e10186359952 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.423136] env[62508]: DEBUG nova.compute.utils [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1516.425024] env[62508]: DEBUG nova.compute.manager [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1516.425108] env[62508]: DEBUG nova.network.neutron [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1516.458607] env[62508]: DEBUG nova.compute.manager [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62508) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 1516.458820] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1516.482375] env[62508]: DEBUG nova.policy [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4defc57a20554c31a366b75df3407ad6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4535e0943ae14d8abb9b17c590fd0f4c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1516.576331] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775924, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.588562] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775925, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.612021] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775929, 'name': PowerOffVM_Task, 'duration_secs': 0.317334} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.612021] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1516.612021] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1516.612021] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca2d0b6-3ee2-4536-997b-b19c1595400b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.621527] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1516.621527] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-155e8db4-d60a-4023-b150-69906f5f6cbf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.645276] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1516.645874] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1516.645874] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Deleting the datastore file [datastore1] 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1516.654059] env[62508]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c149a2b-6021-4edf-b5b2-89d52d1f6eac {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.666279] env[62508]: DEBUG oslo_vmware.api [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for the task: (returnval){ [ 1516.666279] env[62508]: value = "task-1775931" [ 1516.666279] env[62508]: _type = "Task" [ 1516.666279] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.685127] env[62508]: DEBUG oslo_vmware.api [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775931, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.754260] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1516.754522] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1516.754681] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleting the datastore file [datastore1] f456dd83-6350-46b2-b06c-41dc5c477358 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1516.754943] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0894e30a-9a7c-4052-8984-d094b718c2d6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.763458] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1516.763458] env[62508]: value = "task-1775932" [ 1516.763458] env[62508]: _type = "Task" [ 1516.763458] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.776100] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775932, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.784171] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775920, 'name': CloneVM_Task, 'duration_secs': 3.484784} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.784521] env[62508]: INFO nova.virt.vmwareapi.vmops [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Created linked-clone VM from snapshot [ 1516.786009] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84423208-7604-4c5e-9b71-c6c3d8c4785a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.795794] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Uploading image 2035c814-f538-4a5e-ae70-807d6ea61161 {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1516.820483] env[62508]: DEBUG oslo_vmware.rw_handles [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1516.820483] env[62508]: value = "vm-368695" [ 1516.820483] env[62508]: _type = "VirtualMachine" [ 1516.820483] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1516.821146] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b596f233-9234-41ef-a0a2-93691f697438 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.830825] env[62508]: DEBUG oslo_vmware.rw_handles [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lease: (returnval){ [ 1516.830825] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520f1fb9-a0bc-8f53-818e-06b830db83de" [ 1516.830825] env[62508]: _type = "HttpNfcLease" [ 1516.830825] env[62508]: } obtained for exporting VM: (result){ [ 1516.830825] env[62508]: value = "vm-368695" [ 1516.830825] env[62508]: _type = "VirtualMachine" [ 1516.830825] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1516.831335] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the lease: (returnval){ [ 1516.831335] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520f1fb9-a0bc-8f53-818e-06b830db83de" [ 1516.831335] env[62508]: _type = "HttpNfcLease" [ 1516.831335] env[62508]: } to be ready. 
{{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1516.843540] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1516.843540] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520f1fb9-a0bc-8f53-818e-06b830db83de" [ 1516.843540] env[62508]: _type = "HttpNfcLease" [ 1516.843540] env[62508]: } is ready. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1516.843847] env[62508]: DEBUG oslo_vmware.rw_handles [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1516.843847] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520f1fb9-a0bc-8f53-818e-06b830db83de" [ 1516.843847] env[62508]: _type = "HttpNfcLease" [ 1516.843847] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1516.844664] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e96fc1-faac-40d6-ad40-248a64659ad3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.864503] env[62508]: DEBUG oslo_vmware.rw_handles [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e1c243-4833-a863-9257-083c43a9900c/disk-0.vmdk from lease info. {{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1516.864708] env[62508]: DEBUG oslo_vmware.rw_handles [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e1c243-4833-a863-9257-083c43a9900c/disk-0.vmdk for reading. {{(pid=62508) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1516.866389] env[62508]: INFO nova.compute.manager [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Took 41.10 seconds to build instance. [ 1516.929450] env[62508]: DEBUG nova.compute.manager [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1516.934654] env[62508]: DEBUG nova.network.neutron [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Successfully created port: 97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1516.978386] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09939f48-1a7b-425c-afa5-bf9faec85dc9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.988753] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2315f957-52a9-4a54-aea8-0f6c19ea70d2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.023012] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-982d960e-981f-461b-a8c9-9922020c0b89 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.026172] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d859d215-325b-4b20-a12f-75c03d5d8815 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.036648] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf39655-3c88-4f25-aa6a-ab6c01b6922b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.042823] env[62508]: INFO nova.compute.manager [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Rebuilding instance [ 1517.058152] env[62508]: DEBUG nova.compute.provider_tree [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1517.080921] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775924, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.097639] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775925, 'name': CreateVM_Task, 'duration_secs': 1.826233} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.100941] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1517.101755] env[62508]: DEBUG oslo_concurrency.lockutils [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1517.101960] env[62508]: DEBUG oslo_concurrency.lockutils [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.102326] env[62508]: DEBUG oslo_concurrency.lockutils [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1517.104687] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea5cef82-539f-4c79-91ed-be666bcd0399 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.106772] env[62508]: DEBUG nova.compute.manager [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1517.107618] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-106a2a47-caf6-4914-9bc3-4a54b1bddd73 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.117179] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1517.117179] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52226b18-1fcb-374e-eaa5-e49045bffeed" [ 1517.117179] env[62508]: _type = "Task" [ 1517.117179] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.130729] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52226b18-1fcb-374e-eaa5-e49045bffeed, 'name': SearchDatastore_Task, 'duration_secs': 0.012431} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.131047] env[62508]: DEBUG oslo_concurrency.lockutils [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.131327] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1517.131565] env[62508]: DEBUG oslo_concurrency.lockutils [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1517.131714] env[62508]: DEBUG oslo_concurrency.lockutils [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.131897] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1517.132180] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66ec945a-5e8b-443a-8398-54d3722851fd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.141732] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1517.141922] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1517.142878] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41ee1f9a-92c4-4495-8fdc-346317332db3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.149639] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1517.149639] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ca5a0c-a9d6-31e3-d6ef-9f78c006454f" [ 1517.149639] env[62508]: _type = "Task" [ 1517.149639] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.161833] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ca5a0c-a9d6-31e3-d6ef-9f78c006454f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.178993] env[62508]: DEBUG oslo_vmware.api [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Task: {'id': task-1775931, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182932} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.179358] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1517.179827] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1517.179827] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1517.180072] env[62508]: INFO nova.compute.manager [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Took 1.78 seconds to destroy the instance on the hypervisor. [ 1517.180395] env[62508]: DEBUG oslo.service.loopingcall [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1517.180658] env[62508]: DEBUG nova.compute.manager [-] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1517.180764] env[62508]: DEBUG nova.network.neutron [-] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1517.275827] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775932, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176984} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.276384] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1517.276592] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1517.276814] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1517.369485] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3420d5bf-64cf-4fd6-be96-325f6d89647d tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.875s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.577777] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775924, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.595628] env[62508]: DEBUG nova.scheduler.client.report [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 84 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1517.595876] env[62508]: DEBUG nova.compute.provider_tree [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 84 to 85 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1517.596064] env[62508]: DEBUG nova.compute.provider_tree [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1517.610890] env[62508]: DEBUG nova.compute.manager [req-5fe1b865-cc49-4fc5-ad02-5b5f46a74091 req-088760d5-0d13-42f7-844c-c8222e53ab5c service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Received event network-vif-deleted-a4c6f41a-0293-43f8-a413-d6181d46187c {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1517.610890] env[62508]: INFO nova.compute.manager [req-5fe1b865-cc49-4fc5-ad02-5b5f46a74091 req-088760d5-0d13-42f7-844c-c8222e53ab5c service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Neutron deleted interface a4c6f41a-0293-43f8-a413-d6181d46187c; detaching it from the instance and deleting it from the info cache [ 1517.610890] env[62508]: DEBUG nova.network.neutron [req-5fe1b865-cc49-4fc5-ad02-5b5f46a74091 req-088760d5-0d13-42f7-844c-c8222e53ab5c service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1517.625122] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1517.626331] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with 
opID=oslo.vmware-7de98a4e-ebe8-4c50-96b1-e1f386d2c47c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.634881] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1517.634881] env[62508]: value = "task-1775934" [ 1517.634881] env[62508]: _type = "Task" [ 1517.634881] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.647377] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] VM already powered off {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1517.647925] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1517.648745] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd7dbac5-fdd3-422a-865d-f2314d77eccd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.659916] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1517.664508] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ed8c8e29-5c0b-43d7-b9c7-9864f9b0d4c7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.665443] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ca5a0c-a9d6-31e3-d6ef-9f78c006454f, 'name': SearchDatastore_Task, 'duration_secs': 0.014915} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.666659] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7c1a165-03e7-4f43-9013-1ab7790e4809 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.672572] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1517.672572] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52651b0c-fe39-3127-7b23-78902396b082" [ 1517.672572] env[62508]: _type = "Task" [ 1517.672572] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.681132] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52651b0c-fe39-3127-7b23-78902396b082, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.799366] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1517.799805] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1517.800094] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Deleting the datastore file [datastore1] 38289797-ecf5-4207-a164-d70228e4411d {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1517.800445] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-79df214b-e731-4f18-9ca0-501928eb580d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.807213] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1517.807213] env[62508]: value = "task-1775936" [ 1517.807213] env[62508]: _type = "Task" [ 1517.807213] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.818204] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775936, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.943964] env[62508]: DEBUG nova.compute.manager [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1517.972803] env[62508]: DEBUG nova.virt.hardware [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1517.973140] env[62508]: DEBUG nova.virt.hardware [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1517.973357] env[62508]: DEBUG nova.virt.hardware [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1517.973600] env[62508]: DEBUG nova.virt.hardware [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1517.973816] env[62508]: DEBUG nova.virt.hardware [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1517.974097] env[62508]: DEBUG nova.virt.hardware [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1517.975159] env[62508]: DEBUG nova.virt.hardware [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1517.975159] env[62508]: DEBUG nova.virt.hardware [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1517.975159] env[62508]: DEBUG nova.virt.hardware [None 
req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1517.975159] env[62508]: DEBUG nova.virt.hardware [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1517.975159] env[62508]: DEBUG nova.virt.hardware [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1517.976141] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d52cf5d-c3af-4308-ba38-dcdb41ef5eb9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.985999] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-502120ab-3575-4200-8701-5521ec27a6f2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.042111] env[62508]: DEBUG nova.network.neutron [-] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1518.083418] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775924, 'name': CloneVM_Task, 'duration_secs': 2.615459} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.083887] env[62508]: INFO nova.virt.vmwareapi.vmops [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Created linked-clone VM from snapshot [ 1518.085218] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa332e3-8652-4819-a577-a67a87d9da80 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.092813] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Uploading image 0cfee855-f8be-44c1-acb6-ed4711019248 {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1518.106820] env[62508]: DEBUG oslo_concurrency.lockutils [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.190s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.109223] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.026s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.111343] env[62508]: INFO nova.compute.claims [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1518.117065] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b7bc8e63-6424-4f9f-950c-32366c15ea16 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.124474] env[62508]: DEBUG oslo_vmware.rw_handles [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1518.124474] env[62508]: value = "vm-368697" [ 1518.124474] env[62508]: _type = "VirtualMachine" [ 1518.124474] env[62508]: }. 
{{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1518.124848] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-64a9fa30-7d6c-49a9-9ee1-940c55090008 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.132698] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e8d5d95-5925-489f-8af8-b2cf264c230a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.152248] env[62508]: DEBUG oslo_vmware.rw_handles [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lease: (returnval){ [ 1518.152248] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b9db2e-6698-51ec-1340-79f382bf5597" [ 1518.152248] env[62508]: _type = "HttpNfcLease" [ 1518.152248] env[62508]: } obtained for exporting VM: (result){ [ 1518.152248] env[62508]: value = "vm-368697" [ 1518.152248] env[62508]: _type = "VirtualMachine" [ 1518.152248] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1518.152760] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the lease: (returnval){ [ 1518.152760] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b9db2e-6698-51ec-1340-79f382bf5597" [ 1518.152760] env[62508]: _type = "HttpNfcLease" [ 1518.152760] env[62508]: } to be ready. {{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1518.153865] env[62508]: INFO nova.scheduler.client.report [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Deleted allocations for instance 2b166aa9-9381-42c0-a607-7d610f08a4e3 [ 1518.168352] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1518.168352] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b9db2e-6698-51ec-1340-79f382bf5597" [ 1518.168352] env[62508]: _type = "HttpNfcLease" [ 1518.168352] env[62508]: } is ready. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1518.183549] env[62508]: DEBUG oslo_vmware.rw_handles [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1518.183549] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b9db2e-6698-51ec-1340-79f382bf5597" [ 1518.183549] env[62508]: _type = "HttpNfcLease" [ 1518.183549] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1518.184562] env[62508]: DEBUG nova.compute.manager [req-5fe1b865-cc49-4fc5-ad02-5b5f46a74091 req-088760d5-0d13-42f7-844c-c8222e53ab5c service nova] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Detach interface failed, port_id=a4c6f41a-0293-43f8-a413-d6181d46187c, reason: Instance 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad could not be found. 
{{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1518.185772] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e1f2cc6-f61f-4f1b-84a9-505ecad40955 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.197144] env[62508]: DEBUG oslo_vmware.rw_handles [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a0e7d3-ab1d-55e3-e86e-18729c8c8680/disk-0.vmdk from lease info. {{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1518.197144] env[62508]: DEBUG oslo_vmware.rw_handles [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a0e7d3-ab1d-55e3-e86e-18729c8c8680/disk-0.vmdk for reading. {{(pid=62508) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1518.202232] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52651b0c-fe39-3127-7b23-78902396b082, 'name': SearchDatastore_Task, 'duration_secs': 0.016066} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.203618] env[62508]: DEBUG oslo_concurrency.lockutils [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1518.203906] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 6ae078f6-3b96-4b49-b282-cae74d742c97/6ae078f6-3b96-4b49-b282-cae74d742c97.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1518.260521] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ab568b3-9b16-485b-a7ce-4ec32db04b7b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.270924] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1518.270924] env[62508]: value = "task-1775938" [ 1518.270924] env[62508]: _type = "Task" [ 1518.270924] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.280045] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775938, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.317598] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4e61c94e-800c-4df2-a5ff-9892a0e7c8fa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.323553] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775936, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.346641} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.324352] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1518.324754] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1518.325048] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1518.339578] env[62508]: DEBUG nova.virt.hardware [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1518.339935] env[62508]: DEBUG nova.virt.hardware [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor limits 0:0:0 {{(pid=62508) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1518.340209] env[62508]: DEBUG nova.virt.hardware [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1518.340513] env[62508]: DEBUG nova.virt.hardware [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1518.340719] env[62508]: DEBUG nova.virt.hardware [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1518.340903] env[62508]: DEBUG nova.virt.hardware [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1518.341246] env[62508]: DEBUG nova.virt.hardware [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1518.341512] env[62508]: DEBUG nova.virt.hardware [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1518.341752] env[62508]: DEBUG nova.virt.hardware [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1518.342007] env[62508]: DEBUG nova.virt.hardware [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1518.342260] env[62508]: DEBUG nova.virt.hardware [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1518.345011] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e65f822-7f62-443a-917c-1a0be234c8f6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.355919] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-74f0300a-a333-4360-999f-d986b5f967c5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.371473] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:bf:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '179c7569-ad66-43e9-976d-5c0146e5f13f', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1518.379183] env[62508]: DEBUG oslo.service.loopingcall [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1518.386260] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1518.386260] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2666913-8557-445d-92e7-93435e684b95 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.411175] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1518.411175] env[62508]: value = "task-1775939" [ 1518.411175] env[62508]: _type = "Task" [ 1518.411175] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.424022] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775939, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.545218] env[62508]: INFO nova.compute.manager [-] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Took 1.36 seconds to deallocate network for instance. [ 1518.636840] env[62508]: DEBUG nova.compute.manager [req-1a29271d-2196-4de1-9b84-643e82a489d8 req-71da9b5e-ec2f-4648-8411-5c80bffdea92 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Received event network-changed-458dc468-1ae9-4f09-b0e2-4c866362fb80 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1518.637625] env[62508]: DEBUG nova.compute.manager [req-1a29271d-2196-4de1-9b84-643e82a489d8 req-71da9b5e-ec2f-4648-8411-5c80bffdea92 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Refreshing instance network info cache due to event network-changed-458dc468-1ae9-4f09-b0e2-4c866362fb80. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1518.637625] env[62508]: DEBUG oslo_concurrency.lockutils [req-1a29271d-2196-4de1-9b84-643e82a489d8 req-71da9b5e-ec2f-4648-8411-5c80bffdea92 service nova] Acquiring lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.638019] env[62508]: DEBUG oslo_concurrency.lockutils [req-1a29271d-2196-4de1-9b84-643e82a489d8 req-71da9b5e-ec2f-4648-8411-5c80bffdea92 service nova] Acquired lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.638325] env[62508]: DEBUG nova.network.neutron [req-1a29271d-2196-4de1-9b84-643e82a489d8 req-71da9b5e-ec2f-4648-8411-5c80bffdea92 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Refreshing network info cache for port 458dc468-1ae9-4f09-b0e2-4c866362fb80 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1518.667437] env[62508]: DEBUG oslo_concurrency.lockutils [None req-be732277-4d8d-48f9-b545-6a714787344d tempest-ServersTestMultiNic-168775600 tempest-ServersTestMultiNic-168775600-project-member] Lock "2b166aa9-9381-42c0-a607-7d610f08a4e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.389s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.784175] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775938, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.799990] env[62508]: DEBUG nova.network.neutron [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Successfully updated port: 97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1518.928177] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775939, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.055180] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1519.289597] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775938, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.305596] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Acquiring lock "refresh_cache-a0245a18-638d-4c32-bea2-456408b5e001" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1519.305808] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Acquired lock "refresh_cache-a0245a18-638d-4c32-bea2-456408b5e001" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1519.306074] env[62508]: DEBUG nova.network.neutron [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1519.428399] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775939, 'name': CreateVM_Task, 'duration_secs': 0.601594} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.428690] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1519.429489] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1519.429815] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1519.430273] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1519.430774] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6229008f-bda5-48d3-9030-1f272e1029f0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.438985] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1519.438985] env[62508]: value = 
"session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5260eef4-c9a0-26bd-5864-3960c9e579b9" [ 1519.438985] env[62508]: _type = "Task" [ 1519.438985] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.459497] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5260eef4-c9a0-26bd-5864-3960c9e579b9, 'name': SearchDatastore_Task, 'duration_secs': 0.014982} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.459942] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.460284] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1519.460618] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1519.460779] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1519.461070] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1519.464482] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6e33bdb-de4c-47b6-88a3-26da66d41833 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.476287] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1519.476589] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 
tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1519.477540] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-231a828d-3afc-4ec9-8421-d0493827b868 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.489451] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1519.489451] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520c3eca-c691-1d65-e4fe-59db8a68f4f3" [ 1519.489451] env[62508]: _type = "Task" [ 1519.489451] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.505878] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520c3eca-c691-1d65-e4fe-59db8a68f4f3, 'name': SearchDatastore_Task, 'duration_secs': 0.009795} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.508523] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d0683a4-8cb0-4b21-ada0-0a9b52db8ac7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.515294] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1519.515294] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52df9425-92bd-de5a-d470-d63edcf65561" [ 1519.515294] env[62508]: _type = "Task" [ 1519.515294] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.525441] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52df9425-92bd-de5a-d470-d63edcf65561, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.661810] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29afba6-39dc-4f36-b7ce-8f22fec7be0e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.669838] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570ee1ae-524b-454d-8750-196c64a0ad13 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.708612] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25dc2020-e9bb-4793-bee2-464ed0b8b44c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.712923] env[62508]: DEBUG nova.compute.manager [req-40311cb7-b04c-4011-9cd6-ce400eeae7c0 req-3ceaa053-af35-4791-a49e-3791663b38cc service nova] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Received event network-vif-plugged-97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1519.713259] env[62508]: DEBUG oslo_concurrency.lockutils [req-40311cb7-b04c-4011-9cd6-ce400eeae7c0 req-3ceaa053-af35-4791-a49e-3791663b38cc service nova] Acquiring lock "a0245a18-638d-4c32-bea2-456408b5e001-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1519.713502] env[62508]: DEBUG oslo_concurrency.lockutils [req-40311cb7-b04c-4011-9cd6-ce400eeae7c0 req-3ceaa053-af35-4791-a49e-3791663b38cc service nova] Lock "a0245a18-638d-4c32-bea2-456408b5e001-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.713692] env[62508]: DEBUG oslo_concurrency.lockutils [req-40311cb7-b04c-4011-9cd6-ce400eeae7c0 req-3ceaa053-af35-4791-a49e-3791663b38cc service nova] Lock "a0245a18-638d-4c32-bea2-456408b5e001-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.713862] env[62508]: DEBUG nova.compute.manager [req-40311cb7-b04c-4011-9cd6-ce400eeae7c0 req-3ceaa053-af35-4791-a49e-3791663b38cc service nova] [instance: a0245a18-638d-4c32-bea2-456408b5e001] No waiting events found dispatching network-vif-plugged-97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1519.714135] env[62508]: WARNING nova.compute.manager [req-40311cb7-b04c-4011-9cd6-ce400eeae7c0 req-3ceaa053-af35-4791-a49e-3791663b38cc service nova] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Received unexpected event network-vif-plugged-97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad for instance with vm_state building and task_state spawning. 
[ 1519.714468] env[62508]: DEBUG nova.compute.manager [req-40311cb7-b04c-4011-9cd6-ce400eeae7c0 req-3ceaa053-af35-4791-a49e-3791663b38cc service nova] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Received event network-changed-97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1519.714468] env[62508]: DEBUG nova.compute.manager [req-40311cb7-b04c-4011-9cd6-ce400eeae7c0 req-3ceaa053-af35-4791-a49e-3791663b38cc service nova] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Refreshing instance network info cache due to event network-changed-97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1519.714728] env[62508]: DEBUG oslo_concurrency.lockutils [req-40311cb7-b04c-4011-9cd6-ce400eeae7c0 req-3ceaa053-af35-4791-a49e-3791663b38cc service nova] Acquiring lock "refresh_cache-a0245a18-638d-4c32-bea2-456408b5e001" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1519.724015] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8854759-8b29-496d-8104-b9e1124109e2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.741304] env[62508]: DEBUG nova.compute.provider_tree [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1519.765122] env[62508]: DEBUG nova.network.neutron [req-1a29271d-2196-4de1-9b84-643e82a489d8 req-71da9b5e-ec2f-4648-8411-5c80bffdea92 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Updated VIF entry in instance network info cache for port 458dc468-1ae9-4f09-b0e2-4c866362fb80. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1519.765838] env[62508]: DEBUG nova.network.neutron [req-1a29271d-2196-4de1-9b84-643e82a489d8 req-71da9b5e-ec2f-4648-8411-5c80bffdea92 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Updating instance_info_cache with network_info: [{"id": "458dc468-1ae9-4f09-b0e2-4c866362fb80", "address": "fa:16:3e:f7:69:71", "network": {"id": "1469693f-972e-4bc4-8302-f159fa7e79b8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1766475744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27e6f55b56be40d2a619f0119aefb2ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap458dc468-1a", "ovs_interfaceid": "458dc468-1ae9-4f09-b0e2-4c866362fb80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1519.783750] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775938, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.849508] env[62508]: DEBUG nova.network.neutron [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1520.004813] env[62508]: DEBUG nova.network.neutron [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Updating instance_info_cache with network_info: [{"id": "97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad", "address": "fa:16:3e:a9:e4:00", "network": {"id": "e61481a9-5276-457c-85c3-7458f565cf18", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-918393494-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4535e0943ae14d8abb9b17c590fd0f4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap97e5cf3e-ba", "ovs_interfaceid": "97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1520.027181] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52df9425-92bd-de5a-d470-d63edcf65561, 'name': SearchDatastore_Task, 'duration_secs': 0.009772} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.027460] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1520.027712] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] f456dd83-6350-46b2-b06c-41dc5c477358/f456dd83-6350-46b2-b06c-41dc5c477358.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1520.028025] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f3005ac-36c8-4f06-89be-0ae448167eb7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.035449] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1520.035449] env[62508]: value = "task-1775940" [ 1520.035449] env[62508]: _type = "Task" [ 1520.035449] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.044637] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775940, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.243936] env[62508]: DEBUG nova.scheduler.client.report [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1520.267876] env[62508]: DEBUG oslo_concurrency.lockutils [req-1a29271d-2196-4de1-9b84-643e82a489d8 req-71da9b5e-ec2f-4648-8411-5c80bffdea92 service nova] Releasing lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1520.285771] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775938, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.640272} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.286084] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 6ae078f6-3b96-4b49-b282-cae74d742c97/6ae078f6-3b96-4b49-b282-cae74d742c97.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1520.286301] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1520.286559] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-86312b33-a1a8-4fbd-9d02-381440856765 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.293135] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1520.293135] env[62508]: value = "task-1775941" [ 1520.293135] env[62508]: _type = "Task" [ 1520.293135] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.302919] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775941, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.511029] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Releasing lock "refresh_cache-a0245a18-638d-4c32-bea2-456408b5e001" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1520.511441] env[62508]: DEBUG nova.compute.manager [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Instance network_info: |[{"id": "97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad", "address": "fa:16:3e:a9:e4:00", "network": {"id": "e61481a9-5276-457c-85c3-7458f565cf18", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-918393494-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4535e0943ae14d8abb9b17c590fd0f4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap97e5cf3e-ba", "ovs_interfaceid": "97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1520.511791] env[62508]: DEBUG oslo_concurrency.lockutils [req-40311cb7-b04c-4011-9cd6-ce400eeae7c0 req-3ceaa053-af35-4791-a49e-3791663b38cc service nova] Acquired lock "refresh_cache-a0245a18-638d-4c32-bea2-456408b5e001" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1520.511975] env[62508]: DEBUG nova.network.neutron [req-40311cb7-b04c-4011-9cd6-ce400eeae7c0 req-3ceaa053-af35-4791-a49e-3791663b38cc service nova] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Refreshing network info cache for port 97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1520.513278] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:e4:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1520.522331] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 
tempest-ServersTestManualDisk-316539111-project-member] Creating folder: Project (4535e0943ae14d8abb9b17c590fd0f4c). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1520.526121] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-54007f2f-10ff-46e1-bc78-2d67d1743aaa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.541489] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Created folder: Project (4535e0943ae14d8abb9b17c590fd0f4c) in parent group-v368536. [ 1520.541819] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Creating folder: Instances. Parent ref: group-v368700. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1520.545682] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5c9ef70-e4a8-4073-b948-d097ec566a8a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.547925] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775940, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.556084] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Created folder: Instances in parent group-v368700. [ 1520.556340] env[62508]: DEBUG oslo.service.loopingcall [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1520.556595] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1520.556825] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-913ed248-258d-406f-87e3-178faed8f83a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.581136] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1520.581136] env[62508]: value = "task-1775944" [ 1520.581136] env[62508]: _type = "Task" [ 1520.581136] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.591997] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775944, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.750065] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.641s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1520.750607] env[62508]: DEBUG nova.compute.manager [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1520.753834] env[62508]: DEBUG oslo_concurrency.lockutils [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.062s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1520.754059] env[62508]: DEBUG nova.objects.instance [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Lazy-loading 'resources' on Instance uuid 30e8b6ca-10fd-4e98-815d-1622f162b05c {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1520.778732] env[62508]: DEBUG nova.network.neutron [req-40311cb7-b04c-4011-9cd6-ce400eeae7c0 req-3ceaa053-af35-4791-a49e-3791663b38cc service nova] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Updated VIF entry in instance network info cache for port 97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1520.779152] env[62508]: DEBUG nova.network.neutron [req-40311cb7-b04c-4011-9cd6-ce400eeae7c0 req-3ceaa053-af35-4791-a49e-3791663b38cc service nova] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Updating instance_info_cache with network_info: [{"id": "97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad", "address": "fa:16:3e:a9:e4:00", "network": {"id": "e61481a9-5276-457c-85c3-7458f565cf18", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-918393494-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4535e0943ae14d8abb9b17c590fd0f4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap97e5cf3e-ba", "ovs_interfaceid": "97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1520.804809] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775941, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.124561} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.805042] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1520.805847] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc688a4f-abd4-4401-adbc-bed20d7bac36 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.829643] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 6ae078f6-3b96-4b49-b282-cae74d742c97/6ae078f6-3b96-4b49-b282-cae74d742c97.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1520.829979] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b8ea420-ef35-4b5e-bac2-1b8de29a61da {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.850673] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1520.850673] env[62508]: value = "task-1775945" [ 1520.850673] env[62508]: _type = "Task" [ 1520.850673] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.860100] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775945, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.045530] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775940, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.677192} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.045841] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] f456dd83-6350-46b2-b06c-41dc5c477358/f456dd83-6350-46b2-b06c-41dc5c477358.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1521.046112] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1521.046416] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-86f078d8-82d3-4503-9025-7edae307c8bf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.052579] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1521.052579] env[62508]: value = "task-1775946" [ 1521.052579] env[62508]: _type = "Task" [ 1521.052579] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.060223] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775946, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.091167] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775944, 'name': CreateVM_Task, 'duration_secs': 0.465756} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.091335] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1521.092081] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.092257] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.092652] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1521.092924] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1eb263a8-f083-4628-90d1-934edc4c3c85 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.097587] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Waiting for the task: (returnval){ [ 1521.097587] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526b12a4-aa98-929d-4309-5f36ecd312b4" [ 1521.097587] env[62508]: _type = "Task" [ 1521.097587] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.107375] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526b12a4-aa98-929d-4309-5f36ecd312b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.257755] env[62508]: DEBUG nova.compute.utils [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1521.263118] env[62508]: DEBUG nova.compute.manager [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1521.263678] env[62508]: DEBUG nova.network.neutron [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1521.281930] env[62508]: DEBUG oslo_concurrency.lockutils [req-40311cb7-b04c-4011-9cd6-ce400eeae7c0 req-3ceaa053-af35-4791-a49e-3791663b38cc service nova] Releasing lock "refresh_cache-a0245a18-638d-4c32-bea2-456408b5e001" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.308574] env[62508]: DEBUG nova.policy [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '463edfa1b64a4f04b727dafc833d7d0c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '514b44e915e543d89fb0429f0357c86c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1521.364270] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775945, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.562562] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775946, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063794} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.562844] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1521.563699] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc5c0598-eae1-4eae-bb60-152391c6e4c5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.588921] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] f456dd83-6350-46b2-b06c-41dc5c477358/f456dd83-6350-46b2-b06c-41dc5c477358.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1521.592296] env[62508]: DEBUG nova.network.neutron [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Successfully created port: 8cfbc033-eadd-41fc-b4d6-c564a92f8b93 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1521.593965] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c5291ea-b9a1-4d2c-9014-08c717501aa9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.619793] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526b12a4-aa98-929d-4309-5f36ecd312b4, 'name': SearchDatastore_Task, 'duration_secs': 0.011114} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.624786] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.625113] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1521.625520] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.625738] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.626070] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1521.626271] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1521.626271] env[62508]: value = "task-1775947" [ 1521.626271] env[62508]: _type = "Task" [ 1521.626271] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.626693] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66701ebf-78dd-410e-83de-5ce12507af8e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.638699] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775947, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.645450] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1521.645648] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1521.646466] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a3d60ca-2988-45ed-b4d0-6911a341ef88 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.652123] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Waiting for the task: (returnval){ [ 1521.652123] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52426293-79c7-a172-b108-bf03810901e9" [ 1521.652123] env[62508]: _type = "Task" [ 1521.652123] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.662609] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52426293-79c7-a172-b108-bf03810901e9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.688205] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52fa9db5-426a-40c7-9cfe-3ceee5dcb4ea {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.695984] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7be1220-b828-46c5-b606-5d63beb0f88c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.728594] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2aedb56-0044-414a-9da0-c8d1156438a5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.736553] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f197ea0e-685a-49a8-a2bf-e7d8e569ae04 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.750851] env[62508]: DEBUG nova.compute.provider_tree [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1521.763620] env[62508]: DEBUG nova.compute.manager [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1521.863741] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775945, 'name': ReconfigVM_Task, 'duration_secs': 0.67047} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.864049] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 6ae078f6-3b96-4b49-b282-cae74d742c97/6ae078f6-3b96-4b49-b282-cae74d742c97.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1521.864757] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4a31cd8d-1264-4b84-a418-73a2b81f9d8b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.875500] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1521.875500] env[62508]: value = "task-1775948" [ 1521.875500] env[62508]: _type = "Task" [ 1521.875500] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.887650] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775948, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.139370] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775947, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.165622] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52426293-79c7-a172-b108-bf03810901e9, 'name': SearchDatastore_Task, 'duration_secs': 0.012423} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.166732] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94b81556-67e9-434b-8bfe-2a2be5e830fc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.172948] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Waiting for the task: (returnval){ [ 1522.172948] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5210cd4d-ca43-ae28-6f9b-a89f08a23cd6" [ 1522.172948] env[62508]: _type = "Task" [ 1522.172948] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.180768] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5210cd4d-ca43-ae28-6f9b-a89f08a23cd6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.254388] env[62508]: DEBUG nova.scheduler.client.report [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1522.386104] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775948, 'name': Rename_Task, 'duration_secs': 0.274091} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.386509] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1522.386938] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d771ba5-fe6a-4b07-a550-23fc796ce90c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.394219] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1522.394219] env[62508]: value = "task-1775949" [ 1522.394219] env[62508]: _type = "Task" [ 1522.394219] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.402274] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775949, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.645051] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775947, 'name': ReconfigVM_Task, 'duration_secs': 0.705435} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.645377] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Reconfigured VM instance instance-00000037 to attach disk [datastore1] f456dd83-6350-46b2-b06c-41dc5c477358/f456dd83-6350-46b2-b06c-41dc5c477358.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1522.646047] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce4a771a-00b2-4cce-ae99-a59f33b3e870 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.653552] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1522.653552] env[62508]: value = "task-1775950" [ 1522.653552] env[62508]: _type = "Task" [ 1522.653552] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.663453] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775950, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.683081] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5210cd4d-ca43-ae28-6f9b-a89f08a23cd6, 'name': SearchDatastore_Task, 'duration_secs': 0.011165} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.683343] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1522.683624] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a0245a18-638d-4c32-bea2-456408b5e001/a0245a18-638d-4c32-bea2-456408b5e001.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1522.683891] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eca4a10d-188e-4e45-91c2-62074ccc5370 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.690688] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Waiting for the task: (returnval){ [ 1522.690688] env[62508]: value = "task-1775951" [ 1522.690688] env[62508]: _type = "Task" [ 1522.690688] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.698727] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': task-1775951, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.760111] env[62508]: DEBUG oslo_concurrency.lockutils [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.006s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.762769] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.926s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.764544] env[62508]: INFO nova.compute.claims [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1522.772098] env[62508]: DEBUG nova.compute.manager [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1522.785659] env[62508]: INFO nova.scheduler.client.report [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Deleted allocations for instance 30e8b6ca-10fd-4e98-815d-1622f162b05c [ 1522.908795] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775949, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.137425] env[62508]: DEBUG nova.network.neutron [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Successfully updated port: 8cfbc033-eadd-41fc-b4d6-c564a92f8b93 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1523.165861] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775950, 'name': Rename_Task, 'duration_secs': 0.279127} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.166121] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1523.166467] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5e89a9d8-b191-4307-b7ff-c61bc0b4db7d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.175452] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1523.175452] env[62508]: value = "task-1775952" [ 1523.175452] env[62508]: _type = "Task" [ 1523.175452] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.190022] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775952, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.201596] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': task-1775951, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.294722] env[62508]: DEBUG oslo_concurrency.lockutils [None req-093d6519-0546-4602-9143-dfb5f4d32173 tempest-ServerTagsTestJSON-1465183808 tempest-ServerTagsTestJSON-1465183808-project-member] Lock "30e8b6ca-10fd-4e98-815d-1622f162b05c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.074s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.405511] env[62508]: DEBUG oslo_vmware.api [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775949, 'name': PowerOnVM_Task, 'duration_secs': 0.923099} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.405903] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1523.405986] env[62508]: INFO nova.compute.manager [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Took 11.64 seconds to spawn the instance on the hypervisor. 
[ 1523.406111] env[62508]: DEBUG nova.compute.manager [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1523.406923] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d925de-4901-4c06-86fb-74367793e02d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.639773] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Acquiring lock "refresh_cache-fd658703-d477-4d21-b0ad-7ff08d4c2f97" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1523.639920] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Acquired lock "refresh_cache-fd658703-d477-4d21-b0ad-7ff08d4c2f97" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.640101] env[62508]: DEBUG nova.network.neutron [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1523.685961] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775952, 'name': PowerOnVM_Task} progress is 79%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.700075] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': task-1775951, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597853} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.700338] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a0245a18-638d-4c32-bea2-456408b5e001/a0245a18-638d-4c32-bea2-456408b5e001.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1523.700548] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1523.700797] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-520c0b8b-0319-4355-b1eb-5f795c24e16b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.712433] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Waiting for the task: (returnval){ [ 1523.712433] env[62508]: value = "task-1775953" [ 1523.712433] env[62508]: _type = "Task" [ 1523.712433] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.722987] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': task-1775953, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.926890] env[62508]: INFO nova.compute.manager [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Took 45.70 seconds to build instance. 
[ 1524.093702] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4371af3d-c9f4-4c7d-a933-0b632e03d396 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.102558] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af737457-26d8-427b-8cd9-1db843b9e3e1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.138477] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bebc56b-6ae8-43ab-ae73-8bb8317ef0bf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.147772] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0c5cd3-08fd-43f0-8960-1113bb1ddd03 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.162376] env[62508]: DEBUG nova.compute.provider_tree [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1524.176386] env[62508]: DEBUG nova.network.neutron [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1524.187584] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775952, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.222384] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': task-1775953, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097063} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.222662] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1524.223537] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf476521-b255-40a2-b2bd-74079167ce48 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.250152] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] a0245a18-638d-4c32-bea2-456408b5e001/a0245a18-638d-4c32-bea2-456408b5e001.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1524.250511] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2a47fd9-b11d-4c98-9c86-b9a2cbe25729 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.275466] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Waiting for the task: (returnval){ [ 1524.275466] env[62508]: value = "task-1775954" [ 1524.275466] env[62508]: _type = "Task" [ 1524.275466] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.289471] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': task-1775954, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.343577] env[62508]: DEBUG nova.network.neutron [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Updating instance_info_cache with network_info: [{"id": "8cfbc033-eadd-41fc-b4d6-c564a92f8b93", "address": "fa:16:3e:83:c9:35", "network": {"id": "dcb8cc3a-3c99-4772-ad15-00d05b9e2b63", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1988455880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "514b44e915e543d89fb0429f0357c86c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cfbc033-ea", "ovs_interfaceid": "8cfbc033-eadd-41fc-b4d6-c564a92f8b93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.429288] env[62508]: DEBUG oslo_concurrency.lockutils [None req-419cd0fa-e768-4ef3-aaca-006514d7f3ff tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "6ae078f6-3b96-4b49-b282-cae74d742c97" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.217s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.689702] env[62508]: DEBUG oslo_vmware.api [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775952, 'name': PowerOnVM_Task, 'duration_secs': 1.081904} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.689879] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1524.690055] env[62508]: DEBUG nova.compute.manager [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1524.690832] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a042a6-7bbb-44bd-af65-f95f8fc246c7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.694167] env[62508]: DEBUG nova.scheduler.client.report [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 85 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1524.694410] env[62508]: DEBUG nova.compute.provider_tree [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 85 to 86 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1524.694635] env[62508]: DEBUG nova.compute.provider_tree [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1524.785494] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': task-1775954, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.846855] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Releasing lock "refresh_cache-fd658703-d477-4d21-b0ad-7ff08d4c2f97" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1524.847235] env[62508]: DEBUG nova.compute.manager [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Instance network_info: |[{"id": "8cfbc033-eadd-41fc-b4d6-c564a92f8b93", "address": "fa:16:3e:83:c9:35", "network": {"id": "dcb8cc3a-3c99-4772-ad15-00d05b9e2b63", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1988455880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "514b44e915e543d89fb0429f0357c86c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cfbc033-ea", "ovs_interfaceid": "8cfbc033-eadd-41fc-b4d6-c564a92f8b93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1525.200188] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.437s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.200779] env[62508]: DEBUG nova.compute.manager [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1525.203940] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.858s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.205468] env[62508]: INFO nova.compute.claims [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1525.213273] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.286460] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': task-1775954, 'name': ReconfigVM_Task, 'duration_secs': 0.796107} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.286654] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Reconfigured VM instance instance-0000003a to attach disk [datastore1] a0245a18-638d-4c32-bea2-456408b5e001/a0245a18-638d-4c32-bea2-456408b5e001.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1525.287290] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5bb1bf69-6f5a-4e0c-81e8-d0b170a65259 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.293657] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Waiting for the task: (returnval){ [ 1525.293657] env[62508]: value = "task-1775955" [ 1525.293657] env[62508]: _type = "Task" [ 1525.293657] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.307284] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': task-1775955, 'name': Rename_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.710223] env[62508]: DEBUG nova.compute.utils [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1525.711826] env[62508]: DEBUG nova.compute.manager [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1525.712054] env[62508]: DEBUG nova.network.neutron [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1525.757912] env[62508]: DEBUG nova.policy [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05df8f0c7c7c4d8e8e3dcc1646f7a56c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86a46b5a43dd41e48816a8d86e3685b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1525.803703] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': task-1775955, 'name': Rename_Task, 'duration_secs': 0.273645} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.803991] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1525.804265] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-046c1455-b6a1-4b5c-abcb-3df30b745738 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.811969] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Waiting for the task: (returnval){ [ 1525.811969] env[62508]: value = "task-1775956" [ 1525.811969] env[62508]: _type = "Task" [ 1525.811969] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.824299] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': task-1775956, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.042543] env[62508]: DEBUG nova.network.neutron [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Successfully created port: a51ee93a-fba9-4802-9791-4c16f273346e {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1526.218044] env[62508]: DEBUG nova.compute.manager [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1526.327627] env[62508]: DEBUG oslo_vmware.api [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': task-1775956, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.602108] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c0988c-6cee-4b4b-bacc-5e4f293f95f5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.610320] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0dcea23-7c9d-4680-a9db-345fda8b0b3d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.640782] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d48e0e-9838-4f67-ab01-267ff25d730d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.648930] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b96533-127d-4e9b-8f1b-766ad032bb0d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.664083] env[62508]: DEBUG nova.compute.provider_tree [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1526.825782] env[62508]: DEBUG oslo_vmware.api [None 
req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': task-1775956, 'name': PowerOnVM_Task, 'duration_secs': 0.54275} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.826123] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1526.826312] env[62508]: INFO nova.compute.manager [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Took 8.88 seconds to spawn the instance on the hypervisor. [ 1526.826489] env[62508]: DEBUG nova.compute.manager [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1526.827347] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82536be9-6581-4b47-8ad4-bf3d8412ff56 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.199722] env[62508]: DEBUG nova.scheduler.client.report [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 86 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1527.200035] env[62508]: DEBUG nova.compute.provider_tree [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 86 to 87 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1527.200321] env[62508]: DEBUG nova.compute.provider_tree [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1527.229884] env[62508]: DEBUG nova.compute.manager [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1527.346446] env[62508]: INFO nova.compute.manager [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Took 40.49 seconds to build instance. [ 1527.606763] env[62508]: DEBUG nova.network.neutron [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Successfully updated port: a51ee93a-fba9-4802-9791-4c16f273346e {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1527.706353] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.502s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1527.707142] env[62508]: DEBUG nova.compute.manager [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1527.711067] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.735s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.711413] env[62508]: DEBUG nova.objects.instance [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lazy-loading 'resources' on Instance uuid a226327d-11df-45e0-bef8-2337a0317c9e {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1527.849043] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a00be26-39fd-4dbb-8e80-daa61a5ac94d tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Lock "a0245a18-638d-4c32-bea2-456408b5e001" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.340s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.109139] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.109318] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquired lock "refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.109391] env[62508]: DEBUG nova.network.neutron [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1528.214745] env[62508]: DEBUG nova.compute.utils [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1528.219207] env[62508]: DEBUG nova.compute.manager [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1528.219402] env[62508]: DEBUG nova.network.neutron [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1528.260894] env[62508]: DEBUG nova.policy [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '274b6e073c6e48b69d2734ca81a7c811', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1965e796bcbd44a1be5a9c1b50698c0d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1528.531332] env[62508]: DEBUG nova.network.neutron [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Successfully created port: 93b3f95e-7a03-4164-a2b0-8b0c647d4377 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1528.574599] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04486a5b-cfeb-489b-a76d-958981775abf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.584062] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-becf963d-2347-4d6d-807d-dc69d3a4b65c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.619756] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55d2a5e-c1ab-4cd5-90fc-197699af6c43 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.627862] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63fd8ae5-19cb-4b1f-a060-21d18e4b1b5a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.643593] env[62508]: DEBUG nova.compute.provider_tree [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1528.664406] env[62508]: DEBUG nova.network.neutron [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1528.720237] env[62508]: DEBUG nova.compute.manager [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1528.883485] env[62508]: DEBUG nova.network.neutron [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Updating instance_info_cache with network_info: [{"id": "a51ee93a-fba9-4802-9791-4c16f273346e", "address": "fa:16:3e:5a:cd:a0", "network": {"id": "7fdcf35b-d562-4926-a8b1-15143df837c1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-791265259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86a46b5a43dd41e48816a8d86e3685b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa51ee93a-fb", "ovs_interfaceid": "a51ee93a-fba9-4802-9791-4c16f273346e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.146702] env[62508]: DEBUG nova.scheduler.client.report [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1529.386984] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Releasing lock "refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1529.387354] env[62508]: DEBUG nova.compute.manager [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Instance network_info: |[{"id": "a51ee93a-fba9-4802-9791-4c16f273346e", "address": 
"fa:16:3e:5a:cd:a0", "network": {"id": "7fdcf35b-d562-4926-a8b1-15143df837c1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-791265259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86a46b5a43dd41e48816a8d86e3685b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa51ee93a-fb", "ovs_interfaceid": "a51ee93a-fba9-4802-9791-4c16f273346e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1529.563869] env[62508]: DEBUG nova.virt.hardware [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1529.564182] env[62508]: DEBUG nova.virt.hardware [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1529.564385] env[62508]: DEBUG nova.virt.hardware [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1529.564581] env[62508]: DEBUG nova.virt.hardware [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1529.564770] env[62508]: DEBUG nova.virt.hardware [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1529.564918] env[62508]: 
DEBUG nova.virt.hardware [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1529.565242] env[62508]: DEBUG nova.virt.hardware [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1529.565433] env[62508]: DEBUG nova.virt.hardware [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1529.565624] env[62508]: DEBUG nova.virt.hardware [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1529.565808] env[62508]: DEBUG nova.virt.hardware [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1529.566013] env[62508]: DEBUG nova.virt.hardware [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1529.568949] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe1f3c2-f512-40c2-b642-d741ded0fa35 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.578864] env[62508]: DEBUG nova.virt.hardware [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1529.579107] env[62508]: DEBUG 
nova.virt.hardware [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1529.579270] env[62508]: DEBUG nova.virt.hardware [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1529.579455] env[62508]: DEBUG nova.virt.hardware [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1529.579596] env[62508]: DEBUG nova.virt.hardware [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1529.579740] env[62508]: DEBUG nova.virt.hardware [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1529.579940] env[62508]: DEBUG nova.virt.hardware [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1529.580118] env[62508]: DEBUG nova.virt.hardware [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1529.580288] env[62508]: DEBUG nova.virt.hardware [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1529.580450] env[62508]: DEBUG nova.virt.hardware [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1529.580625] env[62508]: DEBUG nova.virt.hardware [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1529.581982] env[62508]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d090551f-e747-4565-a1b8-039729e7c673 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.587391] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-213647b5-b1e3-449c-802a-1a62451121f5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.600794] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:6b:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '304be4f7-4e36-4468-9ef4-e457341cef18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '32066ea0-906e-445b-89fa-625dd3384edf', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1529.609036] env[62508]: DEBUG oslo.service.loopingcall [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1529.611135] env[62508]: DEBUG nova.virt.hardware [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1529.611362] env[62508]: DEBUG nova.virt.hardware [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1529.611565] env[62508]: DEBUG nova.virt.hardware [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1529.611731] env[62508]: DEBUG nova.virt.hardware [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1529.611885] env[62508]: DEBUG nova.virt.hardware [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1529.612045] env[62508]: DEBUG nova.virt.hardware [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1529.612258] env[62508]: DEBUG nova.virt.hardware [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1529.612416] env[62508]: DEBUG nova.virt.hardware [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1529.612579] env[62508]: DEBUG nova.virt.hardware [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1529.612905] env[62508]: DEBUG nova.virt.hardware [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1529.612905] env[62508]: DEBUG nova.virt.hardware [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1529.614937] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1529.616985] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e0fc22-bbf8-4489-b044-b23919aee59c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.619890] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb264e27-f7b8-4ee9-96cb-d4ccdca9ce97 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.635607] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e29a8ad-0094-4150-a0ef-022cf97c626d {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.641501] env[62508]: DEBUG oslo_vmware.rw_handles [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a0e7d3-ab1d-55e3-e86e-18729c8c8680/disk-0.vmdk. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1529.642788] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a3b479-24a1-451a-b3a8-15106054dc69 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.648976] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa6ced4-d28d-48eb-8dc9-dd6c12abee2e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.663371] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.952s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.665277] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1529.665277] env[62508]: value = "task-1775957" [ 1529.665277] env[62508]: _type = "Task" [ 1529.665277] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.665497] env[62508]: DEBUG oslo_vmware.rw_handles [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a0e7d3-ab1d-55e3-e86e-18729c8c8680/disk-0.vmdk is in state: ready. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1529.665642] env[62508]: ERROR oslo_vmware.rw_handles [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a0e7d3-ab1d-55e3-e86e-18729c8c8680/disk-0.vmdk due to incomplete transfer. [ 1529.666166] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:c9:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4b5f9472-1844-4c99-8804-8f193cfff562', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8cfbc033-eadd-41fc-b4d6-c564a92f8b93', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1529.673511] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Creating folder: Project (514b44e915e543d89fb0429f0357c86c). Parent ref: group-v368536. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1529.674153] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.992s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.675758] env[62508]: INFO nova.compute.claims [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1529.678824] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-726243fb-c5a1-44a6-b561-5186e8af4570 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.680804] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b98b40b5-74de-4d8d-9754-11a698dee268 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.697027] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:cd:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0df968ae-c1ef-4009-a0f4-6f2e799c2fda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a51ee93a-fba9-4802-9791-4c16f273346e', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1529.702569] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Creating folder: Project (86a46b5a43dd41e48816a8d86e3685b9). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1529.706343] env[62508]: INFO nova.scheduler.client.report [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Deleted allocations for instance a226327d-11df-45e0-bef8-2337a0317c9e [ 1529.706343] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-abded177-b675-4bda-a859-b270e67b4bd4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.715796] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Created folder: Project (514b44e915e543d89fb0429f0357c86c) in parent group-v368536. [ 1529.716015] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Creating folder: Instances. Parent ref: group-v368704. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1529.716299] env[62508]: DEBUG oslo_vmware.rw_handles [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a0e7d3-ab1d-55e3-e86e-18729c8c8680/disk-0.vmdk. {{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1529.716475] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Uploaded image 0cfee855-f8be-44c1-acb6-ed4711019248 to the Glance image server {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1529.721023] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Destroying the VM {{(pid=62508) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1529.722443] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d0da045-f5b5-4c98-a560-d3ee2d1ec7b3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.724198] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d9600050-5787-414b-a866-3ed5f17e29b8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.725655] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775957, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.729301] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Created folder: Project (86a46b5a43dd41e48816a8d86e3685b9) in parent group-v368536. [ 1529.729301] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Creating folder: Instances. Parent ref: group-v368705. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1529.729301] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e55da25b-3d45-4663-9ca8-826e63d4bf77 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.729400] env[62508]: DEBUG nova.compute.manager [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1529.734776] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1529.734776] env[62508]: value = "task-1775961" [ 1529.734776] env[62508]: _type = "Task" [ 1529.734776] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.739298] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Created folder: Instances in parent group-v368704. [ 1529.739617] env[62508]: DEBUG oslo.service.loopingcall [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1529.740080] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1529.740306] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd99255a-422f-40e6-894f-f8635cb4cbd6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.761616] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775961, 'name': Destroy_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.761885] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Created folder: Instances in parent group-v368705. [ 1529.762113] env[62508]: DEBUG oslo.service.loopingcall [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1529.764417] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1529.765259] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-705c8868-bf10-42cb-9b1f-d831cf1f93b6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.785756] env[62508]: DEBUG nova.virt.hardware [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1529.786079] env[62508]: DEBUG nova.virt.hardware [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1529.786313] env[62508]: DEBUG nova.virt.hardware [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1529.786577] env[62508]: DEBUG nova.virt.hardware [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1529.786792] env[62508]: DEBUG nova.virt.hardware [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1529.787011] env[62508]: DEBUG nova.virt.hardware [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1529.787563] env[62508]: DEBUG nova.virt.hardware [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 
tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1529.787563] env[62508]: DEBUG nova.virt.hardware [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1529.787774] env[62508]: DEBUG nova.virt.hardware [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1529.788018] env[62508]: DEBUG nova.virt.hardware [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1529.788269] env[62508]: DEBUG nova.virt.hardware [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1529.788650] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1529.788650] env[62508]: value = "task-1775963" [ 1529.788650] env[62508]: _type = "Task" [ 1529.788650] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.790131] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe0996f-7932-456b-aacf-3c7f5d9291b7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.799182] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1529.799182] env[62508]: value = "task-1775964" [ 1529.799182] env[62508]: _type = "Task" [ 1529.799182] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.807311] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775963, 'name': CreateVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.809265] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65ab8a6-b057-4df2-8c81-ee64740cd969 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.816797] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775964, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.188233] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775957, 'name': CreateVM_Task, 'duration_secs': 0.479308} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.188609] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1530.189132] env[62508]: DEBUG oslo_concurrency.lockutils [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.189305] env[62508]: DEBUG oslo_concurrency.lockutils [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.189635] env[62508]: DEBUG oslo_concurrency.lockutils [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1530.189900] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2c40056-f97d-426d-b5d1-96a044f04af5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.195933] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1530.195933] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c201a9-53a9-f270-568e-4d3c32964eaa" [ 1530.195933] env[62508]: _type = "Task" [ 1530.195933] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.205907] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c201a9-53a9-f270-568e-4d3c32964eaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.213257] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1a8b5144-6397-4b46-8c9d-e80d3bb13392 tempest-ServerRescueTestJSON-1086587932 tempest-ServerRescueTestJSON-1086587932-project-member] Lock "a226327d-11df-45e0-bef8-2337a0317c9e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.014s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.244993] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775961, 'name': Destroy_Task} progress is 33%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.313726] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775963, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.318141] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775964, 'name': CreateVM_Task, 'duration_secs': 0.453498} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.318141] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1530.318141] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.333026] env[62508]: DEBUG nova.compute.manager [req-40f94ce8-ca55-4656-bfa1-b74a04c860ce req-3ab8d0ce-332d-484b-97ff-c52466ad1ba9 service nova] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Received event network-vif-plugged-8cfbc033-eadd-41fc-b4d6-c564a92f8b93 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1530.333274] env[62508]: DEBUG oslo_concurrency.lockutils [req-40f94ce8-ca55-4656-bfa1-b74a04c860ce req-3ab8d0ce-332d-484b-97ff-c52466ad1ba9 service nova] Acquiring lock "fd658703-d477-4d21-b0ad-7ff08d4c2f97-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.333500] env[62508]: DEBUG oslo_concurrency.lockutils [req-40f94ce8-ca55-4656-bfa1-b74a04c860ce req-3ab8d0ce-332d-484b-97ff-c52466ad1ba9 service nova] Lock "fd658703-d477-4d21-b0ad-7ff08d4c2f97-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.333700] env[62508]: DEBUG oslo_concurrency.lockutils [req-40f94ce8-ca55-4656-bfa1-b74a04c860ce req-3ab8d0ce-332d-484b-97ff-c52466ad1ba9 service nova] Lock "fd658703-d477-4d21-b0ad-7ff08d4c2f97-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.334016] env[62508]: DEBUG nova.compute.manager [req-40f94ce8-ca55-4656-bfa1-b74a04c860ce req-3ab8d0ce-332d-484b-97ff-c52466ad1ba9 service nova] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] No waiting events found dispatching network-vif-plugged-8cfbc033-eadd-41fc-b4d6-c564a92f8b93 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1530.334484] env[62508]: WARNING nova.compute.manager [req-40f94ce8-ca55-4656-bfa1-b74a04c860ce req-3ab8d0ce-332d-484b-97ff-c52466ad1ba9 service nova] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Received unexpected event network-vif-plugged-8cfbc033-eadd-41fc-b4d6-c564a92f8b93 for instance with vm_state building and task_state spawning. 
[ 1530.382940] env[62508]: DEBUG oslo_vmware.rw_handles [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e1c243-4833-a863-9257-083c43a9900c/disk-0.vmdk. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1530.383512] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac706a4d-7e9b-4845-80be-dc5eadae7582 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.388097] env[62508]: DEBUG nova.network.neutron [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Successfully updated port: 93b3f95e-7a03-4164-a2b0-8b0c647d4377 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1530.398017] env[62508]: DEBUG oslo_vmware.rw_handles [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e1c243-4833-a863-9257-083c43a9900c/disk-0.vmdk is in state: ready. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1530.398017] env[62508]: ERROR oslo_vmware.rw_handles [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e1c243-4833-a863-9257-083c43a9900c/disk-0.vmdk due to incomplete transfer. [ 1530.398017] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-99d659fe-922f-4b4b-a368-462b231808d0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.405179] env[62508]: DEBUG oslo_vmware.rw_handles [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e1c243-4833-a863-9257-083c43a9900c/disk-0.vmdk. 
{{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1530.405415] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Uploaded image 2035c814-f538-4a5e-ae70-807d6ea61161 to the Glance image server {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1530.408275] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Destroying the VM {{(pid=62508) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1530.408797] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4be66a94-0379-408c-8a9c-ef76ac78473c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.416183] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1530.416183] env[62508]: value = "task-1775965" [ 1530.416183] env[62508]: _type = "Task" [ 1530.416183] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.424785] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775965, 'name': Destroy_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.711261] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c201a9-53a9-f270-568e-4d3c32964eaa, 'name': SearchDatastore_Task, 'duration_secs': 0.016956} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.712028] env[62508]: DEBUG oslo_concurrency.lockutils [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.712279] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1530.713177] env[62508]: DEBUG oslo_concurrency.lockutils [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.713177] env[62508]: DEBUG oslo_concurrency.lockutils [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.713177] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1530.713177] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.713601] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1530.713841] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b1dab1f-a37a-4099-8696-66a1b6550728 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.715783] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f12afcf-a467-4aa1-a421-fdb311dd71e7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.724018] 
env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1530.724018] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a4e3c6-f047-61b9-5319-99f7c54e2ed1" [ 1530.724018] env[62508]: _type = "Task" [ 1530.724018] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.727656] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1530.727863] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1530.731523] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5feb2636-c143-4093-89f2-dd65b479fbdc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.738837] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a4e3c6-f047-61b9-5319-99f7c54e2ed1, 'name': SearchDatastore_Task, 'duration_secs': 0.010991} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.742814] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.743195] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1530.743544] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.745593] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1530.745593] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529ed32f-0d56-133f-dcdd-92c9fef00509" [ 1530.745593] env[62508]: _type = "Task" [ 1530.745593] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.756100] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775961, 'name': Destroy_Task, 'duration_secs': 0.957511} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.757071] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Destroyed the VM [ 1530.757362] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Deleting Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1530.757621] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-3503331d-6727-42ba-aac6-018648c40e0d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.762918] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529ed32f-0d56-133f-dcdd-92c9fef00509, 'name': SearchDatastore_Task, 'duration_secs': 0.013723} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.764069] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8029386-085d-4ab3-be25-c7b8c4687120 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.767858] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1530.767858] env[62508]: value = "task-1775966" [ 1530.767858] env[62508]: _type = "Task" [ 1530.767858] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.776181] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1530.776181] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5292352e-cacb-4fa3-a5b1-f4705f0d86a5" [ 1530.776181] env[62508]: _type = "Task" [ 1530.776181] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.783582] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775966, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.792731] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5292352e-cacb-4fa3-a5b1-f4705f0d86a5, 'name': SearchDatastore_Task, 'duration_secs': 0.010078} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.795081] env[62508]: DEBUG oslo_concurrency.lockutils [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.795340] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 38289797-ecf5-4207-a164-d70228e4411d/38289797-ecf5-4207-a164-d70228e4411d.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1530.796420] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.796616] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1530.796828] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c608a0ab-72c6-4ee3-91d3-2064ead3ec2c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.802996] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90158cd3-075f-47e8-ae88-37c57c74df52 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.812958] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775963, 'name': CreateVM_Task, 'duration_secs': 0.618184} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.819909] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1530.819909] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1530.819909] env[62508]: value = "task-1775967" [ 1530.819909] env[62508]: _type = "Task" [ 1530.819909] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.819909] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1530.819909] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1530.820427] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.820537] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.820894] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1530.821213] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0425f9f-d46b-4958-87d3-789a47d96a1a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.824108] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f3e9992-79cb-441a-9272-820f2dea4b48 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.832889] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Waiting for the task: (returnval){ [ 1530.832889] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526d2e4e-404d-3131-c2c1-7f1d5b3261e2" [ 1530.832889] env[62508]: _type = "Task" [ 1530.832889] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.837512] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1530.837512] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52209cab-dfec-5ca4-e077-bda623bb15b9" [ 1530.837512] env[62508]: _type = "Task" [ 1530.837512] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.838142] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775967, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.855434] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526d2e4e-404d-3131-c2c1-7f1d5b3261e2, 'name': SearchDatastore_Task, 'duration_secs': 0.010233} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.857157] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.857441] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1530.857665] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.857960] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52209cab-dfec-5ca4-e077-bda623bb15b9, 'name': SearchDatastore_Task, 'duration_secs': 0.009818} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.858818] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3c02085-b0cb-49c8-ac26-135480602466 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.870712] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1530.870712] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52af076e-9136-a418-3cab-dcb369504725" [ 1530.870712] env[62508]: _type = "Task" [ 1530.870712] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.880929] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52af076e-9136-a418-3cab-dcb369504725, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.892515] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "refresh_cache-cdb1ccaf-83b3-48f8-92da-aca2310863ac" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.892703] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquired lock "refresh_cache-cdb1ccaf-83b3-48f8-92da-aca2310863ac" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.892864] env[62508]: DEBUG nova.network.neutron [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1530.927529] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775965, 'name': Destroy_Task} progress is 33%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.970197] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "6ae078f6-3b96-4b49-b282-cae74d742c97" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.970793] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "6ae078f6-3b96-4b49-b282-cae74d742c97" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.970793] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "6ae078f6-3b96-4b49-b282-cae74d742c97-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.970960] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "6ae078f6-3b96-4b49-b282-cae74d742c97-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.971177] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "6ae078f6-3b96-4b49-b282-cae74d742c97-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.973435] env[62508]: INFO nova.compute.manager [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Terminating instance [ 1530.978464] env[62508]: DEBUG nova.compute.manager [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1530.978817] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1530.980170] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d89f46-14d3-4293-9711-47c7a66cbdbd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.989917] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1530.990157] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cc65b0c3-cb60-4c8c-ac1d-ed387a8aab8d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.996740] env[62508]: DEBUG oslo_vmware.api [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1530.996740] env[62508]: value = "task-1775968" [ 1530.996740] env[62508]: _type = "Task" [ 1530.996740] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.010315] env[62508]: DEBUG oslo_vmware.api [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775968, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.182358] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1b26aa-35db-4014-b180-af20010d1b3c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.197577] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf63ec37-eb92-40b5-a8a5-52ac34bc8e1b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.242986] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72dc1db0-b668-4811-b5b6-caead28c2e51 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.253952] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a259ad54-2733-413e-a5e4-d71f124a4bae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.275757] env[62508]: DEBUG nova.compute.provider_tree [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1531.287966] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775966, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.289297] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "f456dd83-6350-46b2-b06c-41dc5c477358" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.289727] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "f456dd83-6350-46b2-b06c-41dc5c477358" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.289952] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "f456dd83-6350-46b2-b06c-41dc5c477358-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.290216] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "f456dd83-6350-46b2-b06c-41dc5c477358-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.290465] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "f456dd83-6350-46b2-b06c-41dc5c477358-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.293119] env[62508]: INFO nova.compute.manager [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Terminating instance [ 1531.296105] env[62508]: DEBUG nova.compute.manager [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1531.296417] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1531.297659] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798541d1-8b5b-4de1-b657-68b33f0c4644 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.309258] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1531.309258] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b8e7eea-3194-48aa-8a9a-0617c11c5faa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.316547] env[62508]: DEBUG oslo_vmware.api [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1531.316547] env[62508]: value = "task-1775969" [ 1531.316547] env[62508]: _type = "Task" [ 1531.316547] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.328307] env[62508]: DEBUG oslo_vmware.api [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775969, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.334099] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775967, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.369603] env[62508]: DEBUG oslo_concurrency.lockutils [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "e156aef5-bb56-4c17-9e7e-9419b672c9cf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.369886] env[62508]: DEBUG oslo_concurrency.lockutils [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "e156aef5-bb56-4c17-9e7e-9419b672c9cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.370117] env[62508]: DEBUG oslo_concurrency.lockutils [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "e156aef5-bb56-4c17-9e7e-9419b672c9cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.370304] env[62508]: DEBUG oslo_concurrency.lockutils [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "e156aef5-bb56-4c17-9e7e-9419b672c9cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.370495] env[62508]: DEBUG oslo_concurrency.lockutils [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "e156aef5-bb56-4c17-9e7e-9419b672c9cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.376359] env[62508]: INFO nova.compute.manager [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Terminating instance [ 1531.378784] env[62508]: DEBUG nova.compute.manager [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1531.378999] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1531.379914] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11fa5021-bf2d-4d6e-949e-6a0750581d14 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.386958] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52af076e-9136-a418-3cab-dcb369504725, 'name': SearchDatastore_Task, 'duration_secs': 0.013502} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.387597] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1531.387857] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a8ce13c4-ea95-4343-8eab-8a0dafbf0e03/a8ce13c4-ea95-4343-8eab-8a0dafbf0e03.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1531.388145] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1531.388335] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1531.388543] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32c89ef5-5386-459d-a3f4-56f08bae8b6a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.392295] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c539765-e78c-4d11-ab17-ecde7ea63733 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.394575] env[62508]: 
DEBUG nova.virt.vmwareapi.vmops [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1531.396725] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bceab7a4-ace9-492a-9b9a-ce46777cc75d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.400228] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1531.400228] env[62508]: value = "task-1775970" [ 1531.400228] env[62508]: _type = "Task" [ 1531.400228] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.406731] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1531.406913] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1531.410781] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6675488d-031e-4cf1-84d5-0f89c948cf42 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.413242] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1775970, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.416209] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Waiting for the task: (returnval){ [ 1531.416209] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5203dbe9-7c41-dbd0-dea9-1a73294696d9" [ 1531.416209] env[62508]: _type = "Task" [ 1531.416209] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.427843] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5203dbe9-7c41-dbd0-dea9-1a73294696d9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.431150] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775965, 'name': Destroy_Task, 'duration_secs': 0.953186} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.431418] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Destroyed the VM [ 1531.431669] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Deleting Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1531.431953] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-3e5b6942-9a65-48cb-b4e2-9f1fa0554426 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.437597] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1531.437597] env[62508]: value = "task-1775972" [ 1531.437597] env[62508]: _type = "Task" [ 1531.437597] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.445211] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775972, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.507138] env[62508]: DEBUG oslo_vmware.api [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775968, 'name': PowerOffVM_Task, 'duration_secs': 0.378128} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.507510] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1531.507729] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1531.508023] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ec70925b-68c0-41da-9d53-836d614a89cf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.585872] env[62508]: DEBUG nova.network.neutron [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1531.765591] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1531.765936] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1531.766244] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Deleting the datastore file [datastore1] 6ae078f6-3b96-4b49-b282-cae74d742c97 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1531.766604] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3658a901-7f9d-437b-bef4-53ef84b316f4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.784320] env[62508]: DEBUG nova.scheduler.client.report [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1531.794505] env[62508]: DEBUG oslo_vmware.api [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1531.794505] env[62508]: value = "task-1775974" [ 1531.794505] env[62508]: _type = "Task" [ 1531.794505] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.802725] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775966, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.811553] env[62508]: DEBUG oslo_vmware.api [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775974, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.829677] env[62508]: DEBUG oslo_vmware.api [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775969, 'name': PowerOffVM_Task, 'duration_secs': 0.219537} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.830438] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1531.830719] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1531.831152] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-708afda2-cb04-4695-887b-5635a8060e5e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.836408] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775967, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530543} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.837297] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 38289797-ecf5-4207-a164-d70228e4411d/38289797-ecf5-4207-a164-d70228e4411d.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1531.837632] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1531.837971] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f9dc9b90-f5ab-41cb-b67e-b13121d00e6c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.845549] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1531.845549] env[62508]: value = "task-1775976" [ 1531.845549] env[62508]: _type = "Task" [ 1531.845549] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.855295] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775976, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.911110] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1775970, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.926704] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5203dbe9-7c41-dbd0-dea9-1a73294696d9, 'name': SearchDatastore_Task, 'duration_secs': 0.01066} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.927730] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52fd7a01-449e-4201-b660-180ffde57e4b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.933709] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Waiting for the task: (returnval){ [ 1531.933709] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ae9986-5b72-901e-ef3f-1399b0a47738" [ 1531.933709] env[62508]: _type = "Task" [ 1531.933709] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.950091] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ae9986-5b72-901e-ef3f-1399b0a47738, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.960355] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775972, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.962622] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1531.963193] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1531.963416] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleting the datastore file [datastore1] f456dd83-6350-46b2-b06c-41dc5c477358 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1531.963935] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ecddca3-8dc6-401b-9e4c-8160a999a878 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.971920] env[62508]: DEBUG oslo_vmware.api [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1531.971920] env[62508]: value = "task-1775977" [ 1531.971920] env[62508]: _type = "Task" [ 1531.971920] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.981772] env[62508]: DEBUG oslo_vmware.api [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775977, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.178072] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1532.179690] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1532.180907] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Deleting the datastore file [datastore1] e156aef5-bb56-4c17-9e7e-9419b672c9cf {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1532.180907] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca5cb57a-6a19-4027-ae39-b7b3a2c902f8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.192051] env[62508]: DEBUG oslo_vmware.api [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1532.192051] env[62508]: value = "task-1775978" [ 1532.192051] env[62508]: _type = "Task" [ 1532.192051] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.198391] env[62508]: DEBUG nova.network.neutron [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Updating instance_info_cache with network_info: [{"id": "93b3f95e-7a03-4164-a2b0-8b0c647d4377", "address": "fa:16:3e:2f:39:be", "network": {"id": "73c15752-ef12-4a06-b340-8848b11c86c2", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-836710224-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1965e796bcbd44a1be5a9c1b50698c0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93b3f95e-7a", "ovs_interfaceid": "93b3f95e-7a03-4164-a2b0-8b0c647d4377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1532.210055] env[62508]: DEBUG oslo_vmware.api [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775978, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.240928] env[62508]: DEBUG oslo_concurrency.lockutils [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "80a9e17e-4095-498c-80c8-200bfb4f3d1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1532.241384] env[62508]: DEBUG oslo_concurrency.lockutils [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "80a9e17e-4095-498c-80c8-200bfb4f3d1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1532.286919] env[62508]: DEBUG oslo_vmware.api [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775966, 'name': RemoveSnapshot_Task, 'duration_secs': 1.079362} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.287593] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Deleted Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1532.288925] env[62508]: INFO nova.compute.manager [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Took 19.88 seconds to snapshot the instance on the hypervisor. [ 1532.295875] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.622s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1532.296961] env[62508]: DEBUG nova.compute.manager [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1532.300558] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 15.842s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1532.314960] env[62508]: DEBUG oslo_vmware.api [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1775974, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.342454} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.315663] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1532.315956] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1532.316259] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1532.316553] env[62508]: INFO nova.compute.manager [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Took 1.34 seconds to destroy the instance on the hypervisor. [ 1532.319019] env[62508]: DEBUG oslo.service.loopingcall [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1532.319019] env[62508]: DEBUG nova.compute.manager [-] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1532.319019] env[62508]: DEBUG nova.network.neutron [-] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1532.357091] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775976, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.127977} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.357444] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1532.358318] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8332901-1ee5-4333-8a1b-b9245fb9e9ba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.384960] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 38289797-ecf5-4207-a164-d70228e4411d/38289797-ecf5-4207-a164-d70228e4411d.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1532.386062] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1d89675-8387-4b28-b041-2cbd9dd66132 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.416908] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1775970, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.418724] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1532.418724] env[62508]: value = "task-1775979" [ 1532.418724] env[62508]: _type = "Task" [ 1532.418724] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.428229] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775979, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.445900] env[62508]: DEBUG nova.compute.manager [req-e696d38e-9f79-4e21-bd0f-305af2d1ea84 req-a9aa1469-4bdc-4e5e-814b-4badfcc8f20f service nova] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Received event network-changed-97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1532.447029] env[62508]: DEBUG nova.compute.manager [req-e696d38e-9f79-4e21-bd0f-305af2d1ea84 req-a9aa1469-4bdc-4e5e-814b-4badfcc8f20f service nova] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Refreshing instance network info cache due to event network-changed-97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1532.447029] env[62508]: DEBUG oslo_concurrency.lockutils [req-e696d38e-9f79-4e21-bd0f-305af2d1ea84 req-a9aa1469-4bdc-4e5e-814b-4badfcc8f20f service nova] Acquiring lock "refresh_cache-a0245a18-638d-4c32-bea2-456408b5e001" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1532.447029] env[62508]: DEBUG oslo_concurrency.lockutils [req-e696d38e-9f79-4e21-bd0f-305af2d1ea84 req-a9aa1469-4bdc-4e5e-814b-4badfcc8f20f service nova] Acquired lock "refresh_cache-a0245a18-638d-4c32-bea2-456408b5e001" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.447029] env[62508]: DEBUG nova.network.neutron [req-e696d38e-9f79-4e21-bd0f-305af2d1ea84 req-a9aa1469-4bdc-4e5e-814b-4badfcc8f20f service nova] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Refreshing network info cache for port 97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1532.454557] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ae9986-5b72-901e-ef3f-1399b0a47738, 'name': SearchDatastore_Task, 'duration_secs': 0.032407} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.455793] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1532.455793] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] fd658703-d477-4d21-b0ad-7ff08d4c2f97/fd658703-d477-4d21-b0ad-7ff08d4c2f97.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1532.455793] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ba42b13-fc97-4e69-8d23-fe48d652c40a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.462065] env[62508]: DEBUG oslo_vmware.api [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775972, 'name': RemoveSnapshot_Task, 'duration_secs': 0.928196} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.463304] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Deleted Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1532.463547] env[62508]: INFO nova.compute.manager [None req-b613b69f-f5d7-4c97-b8c2-a57df07fb8e0 tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Took 21.31 seconds to snapshot the instance on the hypervisor. [ 1532.471761] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Waiting for the task: (returnval){ [ 1532.471761] env[62508]: value = "task-1775980" [ 1532.471761] env[62508]: _type = "Task" [ 1532.471761] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.484901] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': task-1775980, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.490946] env[62508]: DEBUG oslo_vmware.api [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1775977, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.267951} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.491224] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1532.491407] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1532.491586] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1532.492380] env[62508]: INFO nova.compute.manager [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Took 1.20 seconds to destroy the instance on the hypervisor. 
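The records above repeat one vCenter task lifecycle over and over: an operation is invoked (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, RemoveSnapshot_Task, CopyVirtualDisk_Task, ...), the driver logs "Waiting for the task", then polls until the task reports "completed successfully". The following is a minimal, self-contained sketch of that polling pattern only — it is not the oslo.vmware implementation, and the names `get_task_info`, `poll_interval`, and the state constants are hypothetical stand-ins for illustration.

```python
import time

# Hypothetical task states mirroring the lifecycle visible in the log:
# a task runs (progress 0-100%) and then either succeeds or errors out.
RUNNING, SUCCESS, ERROR = "running", "success", "error"


def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
    """Poll a task until it finishes, echoing the
    'Waiting for the task ... progress is N% ... completed successfully'
    sequence recorded above.

    get_task_info: callable returning (state, progress, error) — a
    hypothetical stand-in for reading the Task object's info property.
    """
    deadline = time.monotonic() + timeout
    while True:
        state, progress, error = get_task_info()
        if state == SUCCESS:
            return
        if state == ERROR:
            raise RuntimeError("task failed: %s" % error)
        if time.monotonic() > deadline:
            raise TimeoutError("task did not complete within %ss" % timeout)
        print("progress is %d%%" % progress)   # matches the _poll_task lines
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Tiny simulated task: reports 0%, 50%, then success.
    states = iter([(RUNNING, 0, None), (RUNNING, 50, None), (SUCCESS, 100, None)])
    wait_for_task(lambda: next(states), poll_interval=0.01)
```

The real driver additionally records a duration_secs for each completed task, which is simply the wall-clock time spent in this wait loop.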
[ 1532.492649] env[62508]: DEBUG oslo.service.loopingcall [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1532.492853] env[62508]: DEBUG nova.compute.manager [-] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1532.492956] env[62508]: DEBUG nova.network.neutron [-] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1532.584330] env[62508]: DEBUG oslo_concurrency.lockutils [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "68d64a06-f752-459c-a152-157893e79bfd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1532.584597] env[62508]: DEBUG oslo_concurrency.lockutils [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "68d64a06-f752-459c-a152-157893e79bfd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1532.705270] env[62508]: DEBUG oslo_vmware.api [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1775978, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184393} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.705270] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1532.705270] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1532.705270] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1532.705515] env[62508]: INFO nova.compute.manager [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Took 1.33 seconds to destroy the instance on the hypervisor. 
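The lockutils entries interleaved through this section ("Acquiring lock ... acquired ... waited 0.000s ... released ... held 2.622s") come from named locks taken around critical sections such as the per-instance build lock and the "compute_resources" claim. The sketch below only illustrates where those waited/held figures come from, using a plain threading.Lock; it is an assumption-level illustration, not the oslo_concurrency.lockutils code, and `named_lock` is a hypothetical helper.

```python
import contextlib
import threading
import time

_locks = {}                      # one lock per name, e.g. an instance UUID
_locks_guard = threading.Lock()  # protects the registry itself


@contextlib.contextmanager
def named_lock(name):
    """Acquire a named lock and report how long we waited for it and how
    long we held it, echoing the 'waited N.NNNs' / 'held N.NNNs' fields in
    the log above. Illustrative only; not oslo_concurrency.lockutils."""
    with _locks_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print('Acquiring lock "%s"' % name)
    t_wait = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t_wait
    print('Lock "%s" acquired :: waited %.3fs' % (name, waited))
    t_held = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t_held
        print('Lock "%s" released :: held %.3fs' % (name, held))


# Usage in the style of the build lock seen above (UUID taken from the log):
# with named_lock("80a9e17e-4095-498c-80c8-200bfb4f3d1f"):
#     ...  # _locked_do_build_and_run_instance would run here
```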
[ 1532.705618] env[62508]: DEBUG oslo.service.loopingcall [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1532.705791] env[62508]: DEBUG nova.compute.manager [-] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1532.705889] env[62508]: DEBUG nova.network.neutron [-] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1532.709338] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Releasing lock "refresh_cache-cdb1ccaf-83b3-48f8-92da-aca2310863ac" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1532.709644] env[62508]: DEBUG nova.compute.manager [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Instance network_info: |[{"id": "93b3f95e-7a03-4164-a2b0-8b0c647d4377", "address": "fa:16:3e:2f:39:be", "network": {"id": "73c15752-ef12-4a06-b340-8848b11c86c2", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-836710224-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1965e796bcbd44a1be5a9c1b50698c0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93b3f95e-7a", "ovs_interfaceid": "93b3f95e-7a03-4164-a2b0-8b0c647d4377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1532.710074] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:39:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '93b3f95e-7a03-4164-a2b0-8b0c647d4377', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1532.718788] env[62508]: DEBUG 
oslo.service.loopingcall [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1532.720311] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1532.721152] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-42fb3b70-8df4-4de7-bdbb-594057af8cf9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.745331] env[62508]: DEBUG nova.compute.manager [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1532.750431] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1532.750431] env[62508]: value = "task-1775981" [ 1532.750431] env[62508]: _type = "Task" [ 1532.750431] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.761108] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775981, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.795016] env[62508]: DEBUG nova.compute.manager [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Instance disappeared during snapshot {{(pid=62508) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4495}} [ 1532.808548] env[62508]: DEBUG nova.objects.instance [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lazy-loading 'migration_context' on Instance uuid e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1532.812421] env[62508]: DEBUG nova.compute.utils [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1532.814483] env[62508]: DEBUG nova.compute.manager [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1532.814483] env[62508]: DEBUG nova.network.neutron [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1532.823189] env[62508]: DEBUG nova.compute.manager [None req-521486d1-6ee8-421d-bff0-ce2c25e583f6 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Image not found during clean up 0cfee855-f8be-44c1-acb6-ed4711019248 {{(pid=62508) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4501}} [ 1532.918375] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1775970, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.396299} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.918869] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a8ce13c4-ea95-4343-8eab-8a0dafbf0e03/a8ce13c4-ea95-4343-8eab-8a0dafbf0e03.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1532.919227] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1532.919736] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7fb06d4b-01d5-45cb-8b23-cf715d1af405 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.937802] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775979, 'name': ReconfigVM_Task, 'duration_secs': 0.482773} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.939594] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 38289797-ecf5-4207-a164-d70228e4411d/38289797-ecf5-4207-a164-d70228e4411d.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1532.943525] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1532.943525] env[62508]: value = "task-1775982" [ 1532.943525] env[62508]: _type = "Task" [ 1532.943525] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.943525] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-95ff03f4-0808-49b3-a084-95dbadde7a1e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.954580] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1775982, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.958217] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1532.958217] env[62508]: value = "task-1775983" [ 1532.958217] env[62508]: _type = "Task" [ 1532.958217] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.967373] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775983, 'name': Rename_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.981907] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': task-1775980, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.086856] env[62508]: DEBUG nova.compute.manager [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1533.216017] env[62508]: DEBUG nova.policy [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '724b74d525e84062b8adfe88b8c67a14', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '08a2bcaca6e4406a8ccda7b934995f15', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1533.262480] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775981, 'name': CreateVM_Task, 'duration_secs': 0.510932} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.262651] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1533.263310] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.263470] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.263826] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1533.264086] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4710a91d-e43f-4b57-a14b-ddb857e1f9ec {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.270882] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1533.270882] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525a800b-75aa-027d-79c6-4cad10a8d02e" [ 1533.270882] env[62508]: _type = "Task" [ 1533.270882] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.275721] env[62508]: DEBUG oslo_concurrency.lockutils [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1533.278919] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525a800b-75aa-027d-79c6-4cad10a8d02e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.317328] env[62508]: DEBUG nova.compute.manager [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1533.375430] env[62508]: DEBUG nova.compute.manager [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Received event network-changed-8cfbc033-eadd-41fc-b4d6-c564a92f8b93 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1533.375660] env[62508]: DEBUG nova.compute.manager [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Refreshing instance network info cache due to event network-changed-8cfbc033-eadd-41fc-b4d6-c564a92f8b93. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1533.375913] env[62508]: DEBUG oslo_concurrency.lockutils [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] Acquiring lock "refresh_cache-fd658703-d477-4d21-b0ad-7ff08d4c2f97" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.376089] env[62508]: DEBUG oslo_concurrency.lockutils [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] Acquired lock "refresh_cache-fd658703-d477-4d21-b0ad-7ff08d4c2f97" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.376293] env[62508]: DEBUG nova.network.neutron [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Refreshing network info cache for port 8cfbc033-eadd-41fc-b4d6-c564a92f8b93 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1533.452549] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1775982, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.110782} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.455493] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1533.456844] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ec077f-83e6-4f09-a6d6-7ab9d6fd5400 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.479965] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775983, 'name': Rename_Task, 'duration_secs': 0.247864} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.490961] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] a8ce13c4-ea95-4343-8eab-8a0dafbf0e03/a8ce13c4-ea95-4343-8eab-8a0dafbf0e03.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1533.497152] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1533.500567] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7d4435c-29c1-40a8-91b0-adfec5525b24 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.517259] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2d06a51d-c78c-4233-97a8-67127ded5427 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.525381] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': task-1775980, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.733994} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.527709] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] fd658703-d477-4d21-b0ad-7ff08d4c2f97/fd658703-d477-4d21-b0ad-7ff08d4c2f97.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1533.528082] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1533.528278] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1533.528278] env[62508]: value = "task-1775985" [ 1533.528278] env[62508]: _type = "Task" [ 1533.528278] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.528507] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1533.528507] env[62508]: value = "task-1775984" [ 1533.528507] env[62508]: _type = "Task" [ 1533.528507] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.530893] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d2b28008-e417-4b6d-8da7-da015f986ddd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.545510] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1775985, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.553074] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775984, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.553403] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Waiting for the task: (returnval){ [ 1533.553403] env[62508]: value = "task-1775986" [ 1533.553403] env[62508]: _type = "Task" [ 1533.553403] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.562440] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': task-1775986, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.608856] env[62508]: DEBUG oslo_concurrency.lockutils [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1533.667044] env[62508]: DEBUG nova.network.neutron [-] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1533.782877] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525a800b-75aa-027d-79c6-4cad10a8d02e, 'name': SearchDatastore_Task, 'duration_secs': 0.009546} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.786667] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1533.786976] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1533.787216] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.787365] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.787607] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f 
tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1533.788037] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a1f6a7b-1bf9-4a0a-ac2e-61660e99d5c4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.799659] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1533.799846] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1533.803149] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d592e61d-0e52-4a31-967d-0a7ccc4d2dc7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.810457] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1533.810457] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526a49b7-708a-5861-3dc4-35b8d6393947" [ 1533.810457] env[62508]: _type = "Task" [ 1533.810457] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.825154] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526a49b7-708a-5861-3dc4-35b8d6393947, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.852603] env[62508]: DEBUG nova.network.neutron [-] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1533.854752] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de58a96-f757-46d2-be82-1ca1d8583500 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.863342] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3b3eb7-b728-476d-83c7-24880fb9a4a5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.653975] env[62508]: DEBUG nova.network.neutron [req-e696d38e-9f79-4e21-bd0f-305af2d1ea84 req-a9aa1469-4bdc-4e5e-814b-4badfcc8f20f service nova] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Updated VIF entry in instance network info cache for port 97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1534.654349] env[62508]: DEBUG nova.network.neutron [req-e696d38e-9f79-4e21-bd0f-305af2d1ea84 req-a9aa1469-4bdc-4e5e-814b-4badfcc8f20f service nova] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Updating instance_info_cache with network_info: [{"id": "97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad", "address": "fa:16:3e:a9:e4:00", "network": {"id": "e61481a9-5276-457c-85c3-7458f565cf18", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-918393494-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4535e0943ae14d8abb9b17c590fd0f4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap97e5cf3e-ba", "ovs_interfaceid": "97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1534.655931] env[62508]: DEBUG nova.network.neutron [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Successfully created port: 69b14f39-9f95-4e4c-a3dd-437cf82d8fa0 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1534.659931] env[62508]: DEBUG nova.network.neutron [-] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1534.662294] 
env[62508]: INFO nova.compute.manager [-] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Took 2.34 seconds to deallocate network for instance. [ 1534.662577] env[62508]: DEBUG nova.compute.manager [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1534.665046] env[62508]: INFO nova.compute.manager [-] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Took 2.17 seconds to deallocate network for instance. [ 1534.665347] env[62508]: DEBUG nova.compute.manager [req-dead152b-6796-4885-a097-aa666d6ffdfa req-0a5c6153-67ce-4fcd-9481-497f05a6048f service nova] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Received event network-vif-deleted-8ba6ad85-c68d-4f7c-87dd-acd4fe19986b {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1534.665568] env[62508]: INFO nova.compute.manager [req-dead152b-6796-4885-a097-aa666d6ffdfa req-0a5c6153-67ce-4fcd-9481-497f05a6048f service nova] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Neutron deleted interface 8ba6ad85-c68d-4f7c-87dd-acd4fe19986b; detaching it from the instance and deleting it from the info cache [ 1534.665765] env[62508]: DEBUG nova.network.neutron [req-dead152b-6796-4885-a097-aa666d6ffdfa req-0a5c6153-67ce-4fcd-9481-497f05a6048f service nova] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1534.719209] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-021ddb84-8773-4085-81c0-f4940c6c6919 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.733175] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': task-1775986, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.202539} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.733723] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1775985, 'name': ReconfigVM_Task, 'duration_secs': 0.298453} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.733944] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775984, 'name': PowerOnVM_Task, 'duration_secs': 0.919764} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.734164] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526a49b7-708a-5861-3dc4-35b8d6393947, 'name': SearchDatastore_Task, 'duration_secs': 0.012832} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.736330] env[62508]: DEBUG nova.virt.hardware [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1534.736551] env[62508]: DEBUG nova.virt.hardware [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1534.736707] env[62508]: DEBUG nova.virt.hardware [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1534.736891] env[62508]: DEBUG nova.virt.hardware [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1534.737089] env[62508]: DEBUG nova.virt.hardware [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1534.737248] env[62508]: DEBUG nova.virt.hardware [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1534.737450] env[62508]: DEBUG nova.virt.hardware [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), 
maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1534.737608] env[62508]: DEBUG nova.virt.hardware [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1534.737770] env[62508]: DEBUG nova.virt.hardware [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1534.737931] env[62508]: DEBUG nova.virt.hardware [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1534.738181] env[62508]: DEBUG nova.virt.hardware [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1534.740975] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1534.741565] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Reconfigured VM instance instance-0000003c to attach disk [datastore1] a8ce13c4-ea95-4343-8eab-8a0dafbf0e03/a8ce13c4-ea95-4343-8eab-8a0dafbf0e03.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1534.742070] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1534.742287] env[62508]: DEBUG nova.compute.manager [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1534.743557] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49184db3-2545-4baa-8aa9-3877d36ff50e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.748349] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4834e41e-a6b4-40c1-a142-91bdf8d4a21a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.750989] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e0f7c88d-ff8f-44e9-b2d6-01400fe79817 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.755655] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee803ff-b5e3-4a80-a0e9-7349ceef3aa3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.755655] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4db16ae5-3fb9-4d16-a95e-188f6713ee44 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.761178] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4e681a-29d1-4b1c-804a-13eacf1a49dc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.769327] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a81c2fd-5d21-4daf-aa2f-9990945cbe6f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.796658] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] fd658703-d477-4d21-b0ad-7ff08d4c2f97/fd658703-d477-4d21-b0ad-7ff08d4c2f97.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1534.797062] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1534.797062] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522cd1ca-17d1-e173-6a07-9ba04cf8e1e4" [ 1534.797062] env[62508]: _type = "Task" [ 1534.797062] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.807640] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-139f56c5-628b-484e-8e5d-48ffe71a00d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.824040] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1534.824040] env[62508]: value = "task-1775987" [ 1534.824040] env[62508]: _type = "Task" [ 1534.824040] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.825531] env[62508]: DEBUG nova.compute.provider_tree [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1534.842732] env[62508]: DEBUG nova.scheduler.client.report [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1534.852336] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Waiting for the task: (returnval){ [ 1534.852336] env[62508]: value = "task-1775988" [ 1534.852336] env[62508]: _type = "Task" [ 1534.852336] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.856613] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522cd1ca-17d1-e173-6a07-9ba04cf8e1e4, 'name': SearchDatastore_Task, 'duration_secs': 0.01105} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.864357] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.864357] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] cdb1ccaf-83b3-48f8-92da-aca2310863ac/cdb1ccaf-83b3-48f8-92da-aca2310863ac.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1534.864357] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1775987, 'name': Rename_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.864357] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db802801-8fc9-44be-a8c2-c8758dcc20b0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.870702] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': task-1775988, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.874962] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1534.874962] env[62508]: value = "task-1775989" [ 1534.874962] env[62508]: _type = "Task" [ 1534.874962] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.884692] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775989, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.142411] env[62508]: DEBUG nova.network.neutron [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Updated VIF entry in instance network info cache for port 8cfbc033-eadd-41fc-b4d6-c564a92f8b93. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1535.142411] env[62508]: DEBUG nova.network.neutron [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Updating instance_info_cache with network_info: [{"id": "8cfbc033-eadd-41fc-b4d6-c564a92f8b93", "address": "fa:16:3e:83:c9:35", "network": {"id": "dcb8cc3a-3c99-4772-ad15-00d05b9e2b63", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1988455880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "514b44e915e543d89fb0429f0357c86c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cfbc033-ea", "ovs_interfaceid": "8cfbc033-eadd-41fc-b4d6-c564a92f8b93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1535.170727] env[62508]: DEBUG oslo_concurrency.lockutils [req-e696d38e-9f79-4e21-bd0f-305af2d1ea84 req-a9aa1469-4bdc-4e5e-814b-4badfcc8f20f service nova] Releasing lock "refresh_cache-a0245a18-638d-4c32-bea2-456408b5e001" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.170727] env[62508]: INFO nova.compute.manager [-] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Took 2.46 seconds to deallocate network for instance. 
[ 1535.189802] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1ef6552d-1ac7-4b91-aeef-4dffc10f7ccd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.203035] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85191cb2-9f1d-4382-beed-502e1fdc3791 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.216441] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.217238] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.248272] env[62508]: DEBUG nova.compute.manager [req-dead152b-6796-4885-a097-aa666d6ffdfa req-0a5c6153-67ce-4fcd-9481-497f05a6048f service nova] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Detach interface failed, port_id=8ba6ad85-c68d-4f7c-87dd-acd4fe19986b, reason: Instance e156aef5-bb56-4c17-9e7e-9419b672c9cf could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1535.309382] env[62508]: INFO nova.compute.manager [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] bringing vm to original state: 'stopped' [ 1535.356361] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1775987, 'name': Rename_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.366410] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': task-1775988, 'name': ReconfigVM_Task, 'duration_secs': 0.276413} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.366718] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Reconfigured VM instance instance-0000003b to attach disk [datastore1] fd658703-d477-4d21-b0ad-7ff08d4c2f97/fd658703-d477-4d21-b0ad-7ff08d4c2f97.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1535.368448] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-baff0948-4dec-43fd-af01-3932784af10f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.381918] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Waiting for the task: (returnval){ [ 1535.381918] env[62508]: value = "task-1775990" [ 1535.381918] env[62508]: _type = "Task" [ 1535.381918] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.389043] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775989, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.395064] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': task-1775990, 'name': Rename_Task} progress is 10%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.428508] env[62508]: DEBUG nova.compute.manager [req-38643c3e-f8ce-4e61-bd1c-c04564ca50d7 req-5401b140-6393-47ff-8e42-0727bcdf2fea service nova] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Received event network-vif-deleted-9b029442-1a65-4a97-ac36-d00548fe1f7d {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1535.428690] env[62508]: DEBUG nova.compute.manager [req-38643c3e-f8ce-4e61-bd1c-c04564ca50d7 req-5401b140-6393-47ff-8e42-0727bcdf2fea service nova] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Received event network-vif-deleted-179c7569-ad66-43e9-976d-5c0146e5f13f {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1535.646052] env[62508]: DEBUG oslo_concurrency.lockutils [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] Releasing lock "refresh_cache-fd658703-d477-4d21-b0ad-7ff08d4c2f97" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.646346] env[62508]: DEBUG nova.compute.manager [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Received event network-vif-plugged-a51ee93a-fba9-4802-9791-4c16f273346e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1535.646549] env[62508]: DEBUG oslo_concurrency.lockutils [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] Acquiring lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.646755] env[62508]: DEBUG oslo_concurrency.lockutils [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] Lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.646924] env[62508]: DEBUG oslo_concurrency.lockutils [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] Lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.647156] env[62508]: DEBUG nova.compute.manager [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] No waiting events found dispatching network-vif-plugged-a51ee93a-fba9-4802-9791-4c16f273346e {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1535.647359] env[62508]: WARNING nova.compute.manager [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Received unexpected event network-vif-plugged-a51ee93a-fba9-4802-9791-4c16f273346e for instance with vm_state building and task_state spawning. 
[ 1535.647530] env[62508]: DEBUG nova.compute.manager [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Received event network-changed-a51ee93a-fba9-4802-9791-4c16f273346e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1535.647694] env[62508]: DEBUG nova.compute.manager [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Refreshing instance network info cache due to event network-changed-a51ee93a-fba9-4802-9791-4c16f273346e. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1535.649565] env[62508]: DEBUG oslo_concurrency.lockutils [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] Acquiring lock "refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1535.649565] env[62508]: DEBUG oslo_concurrency.lockutils [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] Acquired lock "refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1535.649565] env[62508]: DEBUG nova.network.neutron [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Refreshing network info cache for port a51ee93a-fba9-4802-9791-4c16f273346e {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1535.677314] env[62508]: DEBUG oslo_concurrency.lockutils [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.850865] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1775987, 'name': Rename_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.858540] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.558s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.864051] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.809s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.864274] env[62508]: DEBUG nova.objects.instance [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lazy-loading 'resources' on Instance uuid 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1535.891594] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775989, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.897415] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': task-1775990, 'name': Rename_Task, 'duration_secs': 0.155279} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.897726] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1535.898019] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f0b7396-9deb-45c7-801d-f292080e2e45 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.905580] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Waiting for the task: (returnval){ [ 1535.905580] env[62508]: value = "task-1775991" [ 1535.905580] env[62508]: _type = "Task" [ 1535.905580] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.912759] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': task-1775991, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.316952] env[62508]: DEBUG oslo_concurrency.lockutils [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "38289797-ecf5-4207-a164-d70228e4411d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.317279] env[62508]: DEBUG oslo_concurrency.lockutils [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "38289797-ecf5-4207-a164-d70228e4411d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.317474] env[62508]: DEBUG nova.compute.manager [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1536.318526] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1a2c7a-e171-4012-96b4-9c7dc4e8e571 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.327785] env[62508]: DEBUG nova.compute.manager [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62508) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1536.330366] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1536.330897] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0a5f9cef-3ce3-4d98-9e86-f4b182a855af {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.340415] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1536.340415] env[62508]: value = "task-1775992" [ 1536.340415] env[62508]: _type = "Task" [ 1536.340415] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.358053] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1775987, 'name': Rename_Task, 'duration_secs': 1.158302} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.362070] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1536.362701] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775992, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.362785] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0cc33e12-0d74-43d7-9594-407cd7ae1003 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.369145] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1536.369145] env[62508]: value = "task-1775993" [ 1536.369145] env[62508]: _type = "Task" [ 1536.369145] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.383637] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1775993, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.393756] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775989, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.513934} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.394413] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] cdb1ccaf-83b3-48f8-92da-aca2310863ac/cdb1ccaf-83b3-48f8-92da-aca2310863ac.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1536.394413] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1536.398806] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a6e99b5e-1137-4e0a-a598-b2d5c8b9f306 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.405746] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1536.405746] env[62508]: value = "task-1775994" [ 1536.405746] env[62508]: _type = "Task" [ 1536.405746] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.422367] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': task-1775991, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.426577] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775994, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.572856] env[62508]: DEBUG nova.network.neutron [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Successfully updated port: 69b14f39-9f95-4e4c-a3dd-437cf82d8fa0 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1536.617042] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "4d24bacc-48c4-4649-bb29-fcae2cf77782" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.618319] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "4d24bacc-48c4-4649-bb29-fcae2cf77782" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.624355] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquiring lock "45de6dd5-97f3-4eea-a171-0254a2b37a41" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.624629] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lock "45de6dd5-97f3-4eea-a171-0254a2b37a41" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.624819] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquiring lock "45de6dd5-97f3-4eea-a171-0254a2b37a41-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.624969] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lock "45de6dd5-97f3-4eea-a171-0254a2b37a41-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.625179] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lock "45de6dd5-97f3-4eea-a171-0254a2b37a41-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.628261] env[62508]: INFO nova.compute.manager [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Terminating instance [ 1536.630832] env[62508]: DEBUG nova.compute.manager [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1536.631059] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1536.632302] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435686b7-c665-4545-9000-f4c994b3d03c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.644514] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1536.644786] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca6563bc-03da-4aba-be4d-c3854c9c17ab {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.653154] env[62508]: DEBUG oslo_vmware.api [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1536.653154] env[62508]: value = "task-1775995" [ 1536.653154] env[62508]: _type = "Task" [ 1536.653154] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.665755] env[62508]: DEBUG oslo_vmware.api [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775995, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.721317] env[62508]: DEBUG nova.network.neutron [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Updated VIF entry in instance network info cache for port a51ee93a-fba9-4802-9791-4c16f273346e. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1536.721731] env[62508]: DEBUG nova.network.neutron [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Updating instance_info_cache with network_info: [{"id": "a51ee93a-fba9-4802-9791-4c16f273346e", "address": "fa:16:3e:5a:cd:a0", "network": {"id": "7fdcf35b-d562-4926-a8b1-15143df837c1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-791265259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86a46b5a43dd41e48816a8d86e3685b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa51ee93a-fb", "ovs_interfaceid": "a51ee93a-fba9-4802-9791-4c16f273346e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1536.750256] env[62508]: DEBUG nova.compute.manager [req-5e26b6d1-2150-4153-8fee-44ff8c4aa5c0 req-b765d7c9-d01a-4b3d-b5f2-60ccbe79cc3e service nova] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Received event network-vif-plugged-69b14f39-9f95-4e4c-a3dd-437cf82d8fa0 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1536.750336] env[62508]: DEBUG oslo_concurrency.lockutils [req-5e26b6d1-2150-4153-8fee-44ff8c4aa5c0 req-b765d7c9-d01a-4b3d-b5f2-60ccbe79cc3e service nova] Acquiring lock "deee2c81-4d2c-47d3-aae6-ef829d59c644-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.750555] env[62508]: DEBUG oslo_concurrency.lockutils [req-5e26b6d1-2150-4153-8fee-44ff8c4aa5c0 req-b765d7c9-d01a-4b3d-b5f2-60ccbe79cc3e service nova] Lock "deee2c81-4d2c-47d3-aae6-ef829d59c644-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.750663] env[62508]: DEBUG oslo_concurrency.lockutils [req-5e26b6d1-2150-4153-8fee-44ff8c4aa5c0 req-b765d7c9-d01a-4b3d-b5f2-60ccbe79cc3e service nova] Lock "deee2c81-4d2c-47d3-aae6-ef829d59c644-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.750882] env[62508]: DEBUG nova.compute.manager [req-5e26b6d1-2150-4153-8fee-44ff8c4aa5c0 req-b765d7c9-d01a-4b3d-b5f2-60ccbe79cc3e service nova] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] No waiting events found dispatching network-vif-plugged-69b14f39-9f95-4e4c-a3dd-437cf82d8fa0 {{(pid=62508) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1536.751058] env[62508]: WARNING nova.compute.manager [req-5e26b6d1-2150-4153-8fee-44ff8c4aa5c0 req-b765d7c9-d01a-4b3d-b5f2-60ccbe79cc3e service nova] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Received unexpected event network-vif-plugged-69b14f39-9f95-4e4c-a3dd-437cf82d8fa0 for instance with vm_state building and task_state spawning. [ 1536.751900] env[62508]: DEBUG nova.compute.manager [req-5e26b6d1-2150-4153-8fee-44ff8c4aa5c0 req-b765d7c9-d01a-4b3d-b5f2-60ccbe79cc3e service nova] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Received event network-changed-69b14f39-9f95-4e4c-a3dd-437cf82d8fa0 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1536.751900] env[62508]: DEBUG nova.compute.manager [req-5e26b6d1-2150-4153-8fee-44ff8c4aa5c0 req-b765d7c9-d01a-4b3d-b5f2-60ccbe79cc3e service nova] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Refreshing instance network info cache due to event network-changed-69b14f39-9f95-4e4c-a3dd-437cf82d8fa0. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1536.751900] env[62508]: DEBUG oslo_concurrency.lockutils [req-5e26b6d1-2150-4153-8fee-44ff8c4aa5c0 req-b765d7c9-d01a-4b3d-b5f2-60ccbe79cc3e service nova] Acquiring lock "refresh_cache-deee2c81-4d2c-47d3-aae6-ef829d59c644" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1536.751900] env[62508]: DEBUG oslo_concurrency.lockutils [req-5e26b6d1-2150-4153-8fee-44ff8c4aa5c0 req-b765d7c9-d01a-4b3d-b5f2-60ccbe79cc3e service nova] Acquired lock "refresh_cache-deee2c81-4d2c-47d3-aae6-ef829d59c644" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1536.751900] env[62508]: DEBUG nova.network.neutron [req-5e26b6d1-2150-4153-8fee-44ff8c4aa5c0 req-b765d7c9-d01a-4b3d-b5f2-60ccbe79cc3e service nova] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Refreshing network info cache for port 69b14f39-9f95-4e4c-a3dd-437cf82d8fa0 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1536.855606] env[62508]: DEBUG oslo_vmware.api [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1775992, 'name': PowerOffVM_Task, 'duration_secs': 0.440659} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.860371] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1536.860702] env[62508]: DEBUG nova.compute.manager [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1536.862261] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a86b1a01-3750-43b2-86f6-efd3d6edb6eb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.890256] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1775993, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.918906] env[62508]: DEBUG oslo_vmware.api [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': task-1775991, 'name': PowerOnVM_Task, 'duration_secs': 0.670836} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.919533] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1536.919730] env[62508]: INFO nova.compute.manager [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Took 14.15 seconds to spawn the instance on the hypervisor. [ 1536.919905] env[62508]: DEBUG nova.compute.manager [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1536.920659] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251630f5-201b-441e-9288-96fcea768521 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.926369] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775994, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069454} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.927803] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1536.928557] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5cfc12a-a2f0-4abe-99c1-30bcf6f49b0d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.932694] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ad8caf-8f88-439d-b792-adee0e241178 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.954573] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa9d021-5ac7-428f-bd18-755112f18a47 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.973791] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] cdb1ccaf-83b3-48f8-92da-aca2310863ac/cdb1ccaf-83b3-48f8-92da-aca2310863ac.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1536.975093] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8367ca7d-fdc0-4079-84dd-e1a65ea29566 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.026468] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-458ee521-11f6-4dc0-b5fe-6fcbb55ccd8f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.030310] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1537.030310] env[62508]: value = "task-1775996" [ 1537.030310] env[62508]: _type = "Task" [ 1537.030310] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.037714] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76eea9f-6f75-474c-bb8a-24e8909c31c8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.044569] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775996, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.054418] env[62508]: DEBUG nova.compute.provider_tree [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1537.077497] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquiring lock "refresh_cache-deee2c81-4d2c-47d3-aae6-ef829d59c644" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1537.128528] env[62508]: DEBUG nova.compute.manager [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1537.170525] env[62508]: DEBUG oslo_vmware.api [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775995, 'name': PowerOffVM_Task, 'duration_secs': 0.247244} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.172106] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1537.172376] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1537.172564] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-788e7658-d630-41bf-994f-cd6a6ca53f94 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.227183] env[62508]: DEBUG oslo_concurrency.lockutils [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] Releasing lock "refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1537.227458] env[62508]: DEBUG nova.compute.manager [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Received event network-vif-plugged-93b3f95e-7a03-4164-a2b0-8b0c647d4377 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1537.227761] env[62508]: DEBUG oslo_concurrency.lockutils [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] Acquiring lock "cdb1ccaf-83b3-48f8-92da-aca2310863ac-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1537.228030] env[62508]: DEBUG oslo_concurrency.lockutils [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] Lock "cdb1ccaf-83b3-48f8-92da-aca2310863ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.228204] env[62508]: DEBUG oslo_concurrency.lockutils [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] Lock "cdb1ccaf-83b3-48f8-92da-aca2310863ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.228382] env[62508]: DEBUG nova.compute.manager [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] No waiting events found dispatching network-vif-plugged-93b3f95e-7a03-4164-a2b0-8b0c647d4377 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1537.228549] env[62508]: WARNING nova.compute.manager [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Received unexpected event network-vif-plugged-93b3f95e-7a03-4164-a2b0-8b0c647d4377 for instance with vm_state building and task_state spawning. [ 1537.228714] env[62508]: DEBUG nova.compute.manager [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Received event network-changed-93b3f95e-7a03-4164-a2b0-8b0c647d4377 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1537.228877] env[62508]: DEBUG nova.compute.manager [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Refreshing instance network info cache due to event network-changed-93b3f95e-7a03-4164-a2b0-8b0c647d4377. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1537.229101] env[62508]: DEBUG oslo_concurrency.lockutils [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] Acquiring lock "refresh_cache-cdb1ccaf-83b3-48f8-92da-aca2310863ac" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1537.229274] env[62508]: DEBUG oslo_concurrency.lockutils [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] Acquired lock "refresh_cache-cdb1ccaf-83b3-48f8-92da-aca2310863ac" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1537.229457] env[62508]: DEBUG nova.network.neutron [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Refreshing network info cache for port 93b3f95e-7a03-4164-a2b0-8b0c647d4377 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1537.290699] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1537.290699] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1537.293878] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Deleting the datastore file [datastore1] 45de6dd5-97f3-4eea-a171-0254a2b37a41 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1537.293878] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72038072-a3ca-4497-9fee-b4c47a7aa48d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.298137] env[62508]: DEBUG oslo_vmware.api [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1537.298137] env[62508]: value = "task-1775998" [ 1537.298137] env[62508]: _type = "Task" [ 1537.298137] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.302592] env[62508]: DEBUG nova.network.neutron [req-5e26b6d1-2150-4153-8fee-44ff8c4aa5c0 req-b765d7c9-d01a-4b3d-b5f2-60ccbe79cc3e service nova] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1537.307852] env[62508]: DEBUG oslo_vmware.api [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775998, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.383831] env[62508]: DEBUG oslo_vmware.api [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1775993, 'name': PowerOnVM_Task, 'duration_secs': 0.75922} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.384305] env[62508]: DEBUG oslo_concurrency.lockutils [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "38289797-ecf5-4207-a164-d70228e4411d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.067s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.385204] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1537.385409] env[62508]: INFO nova.compute.manager [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Took 10.16 seconds to spawn the instance on the hypervisor. [ 1537.385589] env[62508]: DEBUG nova.compute.manager [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1537.386422] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9397510-b1a7-4743-b404-7927877a3eb0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.410983] env[62508]: INFO nova.compute.manager [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Swapping old allocation on dict_keys(['5d5b4923-a8ac-4688-9f86-2405bd3406a9']) held by migration 4fdfcd20-c840-423e-a6be-dfa3f61e5d01 for instance [ 1537.447218] env[62508]: DEBUG nova.scheduler.client.report [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Overwriting current allocation {'allocations': {'5d5b4923-a8ac-4688-9f86-2405bd3406a9': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 90}}, 'project_id': 'bf3fdb58653a47149b5ae7316424d235', 'user_id': '1bebc8625d4842c3b630da112442bcbb', 'consumer_generation': 1} on consumer e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6 {{(pid=62508) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1537.449726] env[62508]: INFO nova.compute.manager [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: 
fd658703-d477-4d21-b0ad-7ff08d4c2f97] Took 47.39 seconds to build instance. [ 1537.457139] env[62508]: DEBUG nova.network.neutron [req-5e26b6d1-2150-4153-8fee-44ff8c4aa5c0 req-b765d7c9-d01a-4b3d-b5f2-60ccbe79cc3e service nova] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1537.540807] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775996, 'name': ReconfigVM_Task, 'duration_secs': 0.440834} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.541130] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Reconfigured VM instance instance-0000003d to attach disk [datastore1] cdb1ccaf-83b3-48f8-92da-aca2310863ac/cdb1ccaf-83b3-48f8-92da-aca2310863ac.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1537.541811] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f6391a4e-0401-49e4-88a1-b106607cb097 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.547480] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1537.547480] env[62508]: value = "task-1775999" [ 1537.547480] env[62508]: _type = "Task" [ 1537.547480] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.555651] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775999, 'name': Rename_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.557627] env[62508]: DEBUG nova.scheduler.client.report [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1537.567127] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "refresh_cache-e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1537.567244] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquired lock "refresh_cache-e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1537.567319] env[62508]: DEBUG nova.network.neutron [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1537.656120] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1537.813621] env[62508]: DEBUG oslo_vmware.api [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1775998, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.369233} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.813621] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1537.814050] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1537.814050] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1537.814369] env[62508]: INFO nova.compute.manager [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1537.814662] env[62508]: DEBUG oslo.service.loopingcall [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1537.814908] env[62508]: DEBUG nova.compute.manager [-] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1537.815015] env[62508]: DEBUG nova.network.neutron [-] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1537.895790] env[62508]: DEBUG oslo_concurrency.lockutils [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1537.909694] env[62508]: INFO nova.compute.manager [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Took 40.09 seconds to build instance. 
[ 1537.951969] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8670a597-1b2d-4599-8476-7e57c78f1543 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Lock "fd658703-d477-4d21-b0ad-7ff08d4c2f97" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.862s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.962113] env[62508]: DEBUG oslo_concurrency.lockutils [req-5e26b6d1-2150-4153-8fee-44ff8c4aa5c0 req-b765d7c9-d01a-4b3d-b5f2-60ccbe79cc3e service nova] Releasing lock "refresh_cache-deee2c81-4d2c-47d3-aae6-ef829d59c644" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1537.962113] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquired lock "refresh_cache-deee2c81-4d2c-47d3-aae6-ef829d59c644" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1537.962113] env[62508]: DEBUG nova.network.neutron [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1538.060675] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1775999, 'name': Rename_Task, 'duration_secs': 0.15469} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.060917] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1538.061343] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d0fa65a-2022-4455-974c-d2a8abeb5cb7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.063719] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.200s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.066051] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 12.853s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.066244] env[62508]: DEBUG nova.objects.instance [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62508) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1538.079201] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1538.079201] env[62508]: value = "task-1776000" [ 1538.079201] env[62508]: _type = "Task" [ 1538.079201] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.096049] env[62508]: INFO nova.scheduler.client.report [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Deleted allocations for instance 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad [ 1538.100679] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776000, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.171519] env[62508]: DEBUG nova.network.neutron [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Updated VIF entry in instance network info cache for port 93b3f95e-7a03-4164-a2b0-8b0c647d4377. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1538.171934] env[62508]: DEBUG nova.network.neutron [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Updating instance_info_cache with network_info: [{"id": "93b3f95e-7a03-4164-a2b0-8b0c647d4377", "address": "fa:16:3e:2f:39:be", "network": {"id": "73c15752-ef12-4a06-b340-8848b11c86c2", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-836710224-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1965e796bcbd44a1be5a9c1b50698c0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93b3f95e-7a", "ovs_interfaceid": "93b3f95e-7a03-4164-a2b0-8b0c647d4377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.288571] env[62508]: DEBUG oslo_concurrency.lockutils [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Acquiring lock "fd658703-d477-4d21-b0ad-7ff08d4c2f97" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1538.288896] env[62508]: DEBUG oslo_concurrency.lockutils [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Lock "fd658703-d477-4d21-b0ad-7ff08d4c2f97" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.289133] env[62508]: DEBUG oslo_concurrency.lockutils [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Acquiring lock "fd658703-d477-4d21-b0ad-7ff08d4c2f97-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1538.289318] env[62508]: DEBUG oslo_concurrency.lockutils [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Lock "fd658703-d477-4d21-b0ad-7ff08d4c2f97-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.289488] 
env[62508]: DEBUG oslo_concurrency.lockutils [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Lock "fd658703-d477-4d21-b0ad-7ff08d4c2f97-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.295308] env[62508]: INFO nova.compute.manager [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Terminating instance [ 1538.300134] env[62508]: DEBUG nova.compute.manager [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1538.300134] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1538.300823] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2632000f-ba30-4a97-95ec-bff5adea59ce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.309162] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1538.309409] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d3350ee9-b9f0-47b7-8b1b-e5f0d3d5aa22 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.316416] env[62508]: DEBUG oslo_vmware.api [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Waiting for the task: (returnval){ [ 1538.316416] env[62508]: value = "task-1776001" [ 1538.316416] env[62508]: _type = "Task" [ 1538.316416] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.327240] env[62508]: DEBUG oslo_vmware.api [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': task-1776001, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.411700] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f33ebe99-7dbf-4ff3-a5ae-d19a50225480 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.889s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.439095] env[62508]: DEBUG nova.network.neutron [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Updating instance_info_cache with network_info: [{"id": "cafd5648-99e8-4c28-92bb-439b1d656b15", "address": "fa:16:3e:79:5e:f6", "network": {"id": "9dcf3f5d-9725-4444-92fa-4343cae84555", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a593c1556b124090beac642efa68ce00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcafd5648-99", "ovs_interfaceid": "cafd5648-99e8-4c28-92bb-439b1d656b15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.503402] env[62508]: DEBUG nova.network.neutron [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1538.598780] env[62508]: DEBUG oslo_vmware.api [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776000, 'name': PowerOnVM_Task, 'duration_secs': 0.486071} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.599076] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1538.599286] env[62508]: INFO nova.compute.manager [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Took 8.87 seconds to spawn the instance on the hypervisor. [ 1538.599457] env[62508]: DEBUG nova.compute.manager [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1538.600293] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a403999-f55e-4592-8fe2-d51715a0fb94 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.619569] env[62508]: DEBUG oslo_concurrency.lockutils [None req-7ba54dc0-80f6-4902-9c8c-44e3f0008950 tempest-SecurityGroupsTestJSON-2036996264 tempest-SecurityGroupsTestJSON-2036996264-project-member] Lock "84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.221s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.675614] env[62508]: DEBUG oslo_concurrency.lockutils [req-13e1f54c-e044-45be-84c7-2f58b3f141d7 req-b15bcff8-fdca-4757-b5a8-aa681f96b3f0 service nova] Releasing lock "refresh_cache-cdb1ccaf-83b3-48f8-92da-aca2310863ac" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1538.682931] env[62508]: DEBUG nova.network.neutron [-] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.723208] env[62508]: DEBUG nova.network.neutron [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Updating instance_info_cache with network_info: [{"id": "69b14f39-9f95-4e4c-a3dd-437cf82d8fa0", "address": "fa:16:3e:58:65:62", "network": {"id": "f7d53ec2-0e84-4e27-87d1-6f2a48716aa5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-801595391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08a2bcaca6e4406a8ccda7b934995f15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69b14f39-9f", "ovs_interfaceid": "69b14f39-9f95-4e4c-a3dd-437cf82d8fa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.827330] env[62508]: DEBUG oslo_vmware.api [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': task-1776001, 'name': PowerOffVM_Task, 'duration_secs': 0.229094} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.827627] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1538.827627] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1538.827801] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6defce2-6068-419a-9c25-3da2782377c3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.941029] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1538.941029] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1538.941029] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Deleting the datastore file [datastore1] fd658703-d477-4d21-b0ad-7ff08d4c2f97 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1538.941029] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f0474cc5-50d8-47ee-b8b6-5d4feb4a5236 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.946185] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 
tempest-MigrationsAdminTest-1752776070-project-member] Releasing lock "refresh_cache-e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1538.946185] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1538.946185] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5ac13e96-e949-481e-abaa-6bf213be3a69 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.951525] env[62508]: DEBUG oslo_vmware.api [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Waiting for the task: (returnval){ [ 1538.951525] env[62508]: value = "task-1776003" [ 1538.951525] env[62508]: _type = "Task" [ 1538.951525] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.953063] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1538.953063] env[62508]: value = "task-1776004" [ 1538.953063] env[62508]: _type = "Task" [ 1538.953063] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.969667] env[62508]: DEBUG oslo_vmware.api [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': task-1776003, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.973520] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776004, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.999559] env[62508]: DEBUG nova.compute.manager [req-22d10ec0-ee52-461d-98b0-c12e77e591a5 req-e3f0aacf-7173-4a70-b21e-58ee2fd775c3 service nova] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Received event network-vif-deleted-268ea73b-a264-453c-969d-59f58dd50192 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1539.079681] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c6d63c6c-b209-439b-b6e4-8121b4d56288 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.080953] env[62508]: DEBUG oslo_concurrency.lockutils [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.805s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.083022] env[62508]: INFO nova.compute.claims [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1539.137136] env[62508]: INFO nova.compute.manager [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Took 29.81 seconds to build instance. [ 1539.186168] env[62508]: INFO nova.compute.manager [-] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Took 1.37 seconds to deallocate network for instance. 
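The "Acquiring lock" / "acquired ... waited" / "released ... held" messages above are emitted by oslo.concurrency's named internal locks (lockutils.py:402/407/421). A minimal sketch of the calling pattern that produces them, assuming illustrative function names rather than the actual Nova resource-tracker code:

# Illustrative sketch only: taking the same process-wide named lock that the
# "compute_resources" messages above record. Function names are examples.
from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def claim_resources(instance):
    # Runs only while the "compute_resources" semaphore is held; lockutils
    # logs the waited/held durations seen in the entries above.
    pass

def update_usage(instance):
    # Equivalent context-manager form.
    with lockutils.lock("compute_resources"):
        pass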
[ 1539.226370] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Releasing lock "refresh_cache-deee2c81-4d2c-47d3-aae6-ef829d59c644" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1539.226488] env[62508]: DEBUG nova.compute.manager [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Instance network_info: |[{"id": "69b14f39-9f95-4e4c-a3dd-437cf82d8fa0", "address": "fa:16:3e:58:65:62", "network": {"id": "f7d53ec2-0e84-4e27-87d1-6f2a48716aa5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-801595391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08a2bcaca6e4406a8ccda7b934995f15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69b14f39-9f", "ovs_interfaceid": "69b14f39-9f95-4e4c-a3dd-437cf82d8fa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1539.226837] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:65:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3952eb02-1162-48ed-8227-9c138960d583', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '69b14f39-9f95-4e4c-a3dd-437cf82d8fa0', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1539.235787] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Creating folder: Project (08a2bcaca6e4406a8ccda7b934995f15). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1539.236862] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-92aab08e-86f7-4116-81ce-2759551e23ed {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.248316] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Created folder: Project (08a2bcaca6e4406a8ccda7b934995f15) in parent group-v368536. 
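The two entries above show the neutron network_info being translated into the "Instance VIF info" the VMware driver builds the VM with: the NSX logical switch becomes an OpaqueNetwork reference and the port gets a vmxnet3 adapter. A minimal sketch of that mapping, with values copied from the log and a hypothetical helper name (not the actual nova.virt.vmwareapi code):

# Sketch: network_info VIF -> VMware VIF-info dict, as recorded above.
def vif_info_from_network_info(vif):
    return {
        "network_name": vif["network"]["bridge"],          # 'br-int'
        "mac_address": vif["address"],                      # 'fa:16:3e:58:65:62'
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": vif["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],                              # neutron port UUID
        "vif_model": "vmxnet3",
    }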
[ 1539.248515] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Creating folder: Instances. Parent ref: group-v368711. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1539.249293] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-77511d2a-459c-45a6-9732-41c5c7f7b6df {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.258182] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Created folder: Instances in parent group-v368711. [ 1539.258398] env[62508]: DEBUG oslo.service.loopingcall [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1539.258671] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1539.258805] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aed3fcc6-c8e3-4f0f-b51a-aabaa9132dbf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.285766] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1539.285766] env[62508]: value = "task-1776007" [ 1539.285766] env[62508]: _type = "Task" [ 1539.285766] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.298733] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776007, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.471021] env[62508]: DEBUG nova.compute.manager [req-6dbac400-0d49-4e18-b45d-9c5dfe0fbae3 req-1f86edd6-6fa1-46c7-8f15-3aa169bdb802 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Received event network-changed-a51ee93a-fba9-4802-9791-4c16f273346e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1539.471021] env[62508]: DEBUG nova.compute.manager [req-6dbac400-0d49-4e18-b45d-9c5dfe0fbae3 req-1f86edd6-6fa1-46c7-8f15-3aa169bdb802 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Refreshing instance network info cache due to event network-changed-a51ee93a-fba9-4802-9791-4c16f273346e. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1539.471021] env[62508]: DEBUG oslo_concurrency.lockutils [req-6dbac400-0d49-4e18-b45d-9c5dfe0fbae3 req-1f86edd6-6fa1-46c7-8f15-3aa169bdb802 service nova] Acquiring lock "refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1539.471021] env[62508]: DEBUG oslo_concurrency.lockutils [req-6dbac400-0d49-4e18-b45d-9c5dfe0fbae3 req-1f86edd6-6fa1-46c7-8f15-3aa169bdb802 service nova] Acquired lock "refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1539.471021] env[62508]: DEBUG nova.network.neutron [req-6dbac400-0d49-4e18-b45d-9c5dfe0fbae3 req-1f86edd6-6fa1-46c7-8f15-3aa169bdb802 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Refreshing network info cache for port a51ee93a-fba9-4802-9791-4c16f273346e {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1539.475660] env[62508]: DEBUG oslo_vmware.api [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Task: {'id': task-1776003, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.350117} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.480659] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1539.480659] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1539.480659] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1539.480659] env[62508]: INFO nova.compute.manager [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1539.480659] env[62508]: DEBUG oslo.service.loopingcall [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1539.484621] env[62508]: DEBUG nova.compute.manager [-] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1539.484874] env[62508]: DEBUG nova.network.neutron [-] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1539.487179] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776004, 'name': PowerOffVM_Task, 'duration_secs': 0.228765} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.488551] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1539.489499] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:10:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='192acf26-1962-4b3a-b461-037bee820f70',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1531657240',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1539.490023] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1539.490395] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1539.490957] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1539.491297] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1539.493026] env[62508]: DEBUG nova.virt.hardware [None 
req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1539.493026] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1539.493026] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1539.493026] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1539.493026] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1539.493512] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1539.501974] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d311848a-97f9-41ad-a24b-900c286b5746 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.522718] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1539.522718] env[62508]: value = "task-1776008" [ 1539.522718] env[62508]: _type = "Task" [ 1539.522718] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.533279] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776008, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.643167] env[62508]: DEBUG oslo_concurrency.lockutils [None req-54d0e2bc-f612-4f0b-914b-137271bf8d2f tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "cdb1ccaf-83b3-48f8-92da-aca2310863ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.329s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.665366] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "38289797-ecf5-4207-a164-d70228e4411d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.665658] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "38289797-ecf5-4207-a164-d70228e4411d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.665913] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "38289797-ecf5-4207-a164-d70228e4411d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.666194] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "38289797-ecf5-4207-a164-d70228e4411d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.666263] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "38289797-ecf5-4207-a164-d70228e4411d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.668958] env[62508]: INFO nova.compute.manager [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Terminating instance [ 1539.671312] env[62508]: DEBUG nova.compute.manager [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1539.671534] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1539.672652] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a58ea3-efaf-4f62-98c0-db459b7dd272 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.687228] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1539.687547] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1b707c7-8a86-44e7-b06f-afb602c6425d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.692802] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.794182] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1539.794375] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1539.794615] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Deleting the datastore file [datastore1] 38289797-ecf5-4207-a164-d70228e4411d {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1539.798915] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11d2a80d-4a10-4c41-bdf2-6b724e7c300d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.803141] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776007, 'name': CreateVM_Task, 'duration_secs': 0.383258} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.803141] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1539.803141] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1539.803661] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1539.804345] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1539.806157] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb588b21-7132-4013-85ee-f250c48e86cc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.808566] env[62508]: DEBUG oslo_vmware.api [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1539.808566] env[62508]: value = "task-1776010" [ 1539.808566] env[62508]: _type = "Task" [ 1539.808566] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.813472] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1539.813472] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5298fd24-c06e-ed7f-7fc8-857714b980ff" [ 1539.813472] env[62508]: _type = "Task" [ 1539.813472] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.822017] env[62508]: DEBUG oslo_vmware.api [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776010, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.829542] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5298fd24-c06e-ed7f-7fc8-857714b980ff, 'name': SearchDatastore_Task, 'duration_secs': 0.009056} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.830285] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1539.830715] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1539.830990] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1539.832026] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1539.832026] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1539.832026] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84e67670-f365-4f81-b905-42d7eb28cede {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.842273] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1539.842503] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1539.843526] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9d7b3cd-4a03-428b-86a9-c599257a1f03 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.850016] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1539.850016] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527cbb7d-3392-dbd4-dee0-af653817f94c" [ 1539.850016] env[62508]: _type = "Task" [ 1539.850016] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.858844] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527cbb7d-3392-dbd4-dee0-af653817f94c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.041160] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776008, 'name': ReconfigVM_Task, 'duration_secs': 0.180059} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.042375] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e92325-72fa-4ceb-ba4b-792f3ad1b69d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.063558] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:10:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='192acf26-1962-4b3a-b461-037bee820f70',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1531657240',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1540.063558] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1540.063558] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1540.063558] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1540.063558] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1540.063558] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1540.064054] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1540.064054] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1540.064113] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1540.064422] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1540.064422] env[62508]: DEBUG nova.virt.hardware [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1540.068362] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38f6f57e-fa48-43cd-88e1-0f7b66162753 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.081421] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1540.081421] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c1fcc7-6cbd-c56d-fd6c-3c7df0c3a755" [ 1540.081421] env[62508]: _type = "Task" [ 1540.081421] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.086560] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c1fcc7-6cbd-c56d-fd6c-3c7df0c3a755, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.214142] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Acquiring lock "4bf92157-1d8c-4c3c-bc61-adb6d26bff54" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.214447] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Lock "4bf92157-1d8c-4c3c-bc61-adb6d26bff54" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.318854] env[62508]: DEBUG oslo_vmware.api [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776010, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186706} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.321710] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1540.321903] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1540.322099] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1540.322298] env[62508]: INFO nova.compute.manager [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Took 0.65 seconds to destroy the instance on the hypervisor. 
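Every vCenter operation above follows the same shape: a task reference is returned ("Waiting for the task: ... _type = Task"), progress is polled ("_poll_task ... progress is N%"), and completion is logged with a duration_secs value. A rough sketch of that polling loop, under the assumption of a caller-supplied get_task_info function; this is illustrative and not oslo.vmware's actual API:

# Sketch of the task-polling pattern visible in the log.
import time

def wait_for_task(get_task_info, task_ref, interval=0.5):
    start = time.monotonic()
    while True:
        info = get_task_info(task_ref)      # e.g. progress is 0%, 51%, ...
        if info["state"] == "success":
            info["duration_secs"] = time.monotonic() - start
            return info
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        time.sleep(interval)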
[ 1540.322904] env[62508]: DEBUG oslo.service.loopingcall [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1540.324025] env[62508]: DEBUG nova.compute.manager [-] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1540.324025] env[62508]: DEBUG nova.network.neutron [-] [instance: 38289797-ecf5-4207-a164-d70228e4411d] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1540.362968] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527cbb7d-3392-dbd4-dee0-af653817f94c, 'name': SearchDatastore_Task, 'duration_secs': 0.009557} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.363944] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4dbb1e9e-8df2-4a50-b159-b36714cfaac8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.376353] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1540.376353] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52476258-18e8-ec8a-83ef-ed1004edb48e" [ 1540.376353] env[62508]: _type = "Task" [ 1540.376353] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.391865] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52476258-18e8-ec8a-83ef-ed1004edb48e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.517972] env[62508]: DEBUG nova.network.neutron [-] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1540.560037] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a72d1b0-3e69-4777-84d8-be6126062433 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.569992] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38bf51b-badd-4cea-b77c-b06d4b6e21fa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.605489] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9174f1-d434-4eb6-848b-42506cae5972 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.616312] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a36e5a7b-e2d5-41c7-b85d-3ff75c0854a8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.620310] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c1fcc7-6cbd-c56d-fd6c-3c7df0c3a755, 'name': SearchDatastore_Task, 'duration_secs': 0.02935} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.626445] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Reconfiguring VM instance instance-0000002b to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1540.629196] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-942f35dc-40ff-4d48-ba75-20fdc5d58f35 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.650648] env[62508]: DEBUG nova.compute.provider_tree [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1540.659732] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1540.659732] env[62508]: value = "task-1776011" [ 1540.659732] env[62508]: _type = "Task" [ 1540.659732] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.667133] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776011, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.717110] env[62508]: DEBUG nova.compute.manager [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1540.720610] env[62508]: DEBUG nova.network.neutron [req-6dbac400-0d49-4e18-b45d-9c5dfe0fbae3 req-1f86edd6-6fa1-46c7-8f15-3aa169bdb802 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Updated VIF entry in instance network info cache for port a51ee93a-fba9-4802-9791-4c16f273346e. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1540.720962] env[62508]: DEBUG nova.network.neutron [req-6dbac400-0d49-4e18-b45d-9c5dfe0fbae3 req-1f86edd6-6fa1-46c7-8f15-3aa169bdb802 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Updating instance_info_cache with network_info: [{"id": "a51ee93a-fba9-4802-9791-4c16f273346e", "address": "fa:16:3e:5a:cd:a0", "network": {"id": "7fdcf35b-d562-4926-a8b1-15143df837c1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-791265259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86a46b5a43dd41e48816a8d86e3685b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa51ee93a-fb", "ovs_interfaceid": "a51ee93a-fba9-4802-9791-4c16f273346e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1540.888571] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52476258-18e8-ec8a-83ef-ed1004edb48e, 'name': SearchDatastore_Task, 'duration_secs': 0.016404} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.890719] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1540.890719] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] deee2c81-4d2c-47d3-aae6-ef829d59c644/deee2c81-4d2c-47d3-aae6-ef829d59c644.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1540.890719] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-74dcf133-0be3-4826-a0b2-fbf0ee770668 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.896142] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1540.896142] env[62508]: value = "task-1776012" [ 1540.896142] env[62508]: _type = "Task" [ 1540.896142] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.904610] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776012, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.021360] env[62508]: INFO nova.compute.manager [-] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Took 1.54 seconds to deallocate network for instance. 
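The entries above record the image-cache path for the root disk: the worker serializes on the cached VMDK under devstack-image-cache_base, confirms it exists with a SearchDatastore_Task, then starts a CopyVirtualDisk_Task into the instance directory (the copy completes and the root disk is extended further down). A sketch of that flow under the assumption of hypothetical datastore helpers, not the driver's real ones:

# Sketch: serialize on the cached image, verify it, copy it to the instance dir.
from oslo_concurrency import lockutils

def fetch_root_disk(cache_vmdk, instance_vmdk, file_exists, copy_virtual_disk):
    with lockutils.lock(cache_vmdk):
        # SearchDatastore_Task in the log: is the cached VMDK present?
        if not file_exists(cache_vmdk):
            raise FileNotFoundError(cache_vmdk)
    # CopyVirtualDisk_Task: datastore-side copy, polled like any other task.
    copy_virtual_disk(cache_vmdk, instance_vmdk)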
[ 1541.035790] env[62508]: DEBUG nova.compute.manager [req-36be3bc7-9c05-40ec-ae34-0c1aa1e7e72a req-488003a8-341c-456c-a017-1d2427b8b400 service nova] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Received event network-vif-deleted-8cfbc033-eadd-41fc-b4d6-c564a92f8b93 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1541.035924] env[62508]: DEBUG nova.compute.manager [req-36be3bc7-9c05-40ec-ae34-0c1aa1e7e72a req-488003a8-341c-456c-a017-1d2427b8b400 service nova] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Received event network-vif-deleted-32066ea0-906e-445b-89fa-625dd3384edf {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1541.037045] env[62508]: INFO nova.compute.manager [req-36be3bc7-9c05-40ec-ae34-0c1aa1e7e72a req-488003a8-341c-456c-a017-1d2427b8b400 service nova] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Neutron deleted interface 32066ea0-906e-445b-89fa-625dd3384edf; detaching it from the instance and deleting it from the info cache [ 1541.037857] env[62508]: DEBUG nova.network.neutron [req-36be3bc7-9c05-40ec-ae34-0c1aa1e7e72a req-488003a8-341c-456c-a017-1d2427b8b400 service nova] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.154197] env[62508]: DEBUG nova.scheduler.client.report [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1541.172910] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776011, 'name': ReconfigVM_Task, 'duration_secs': 0.427222} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.174755] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Reconfigured VM instance instance-0000002b to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1541.176598] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6d0477-84bf-4819-80e6-ad210e5ae01e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.212387] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6/e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1541.213482] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30376784-f888-4927-b0fe-a6fd4318ce4d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.237183] env[62508]: DEBUG oslo_concurrency.lockutils [req-6dbac400-0d49-4e18-b45d-9c5dfe0fbae3 req-1f86edd6-6fa1-46c7-8f15-3aa169bdb802 service nova] Releasing lock "refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1541.244612] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1541.244612] env[62508]: value = "task-1776013" [ 1541.244612] env[62508]: _type = "Task" [ 1541.244612] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.253069] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1541.256746] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776013, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.260267] env[62508]: DEBUG nova.network.neutron [-] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.407227] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776012, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.490693] env[62508]: DEBUG nova.compute.manager [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1541.491686] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e42aca37-6303-4a33-9cd5-87c234ecedd0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.531139] env[62508]: DEBUG oslo_concurrency.lockutils [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1541.545555] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0bca641b-2111-432c-8d00-c58594f6970b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.555187] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d987d3-b7e3-479a-954c-a1b9829cbf70 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.593301] env[62508]: DEBUG nova.compute.manager [req-36be3bc7-9c05-40ec-ae34-0c1aa1e7e72a req-488003a8-341c-456c-a017-1d2427b8b400 service nova] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Detach interface failed, port_id=32066ea0-906e-445b-89fa-625dd3384edf, reason: Instance 38289797-ecf5-4207-a164-d70228e4411d could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1541.666106] env[62508]: DEBUG oslo_concurrency.lockutils [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.585s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1541.666687] env[62508]: DEBUG nova.compute.manager [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1541.669299] env[62508]: DEBUG oslo_concurrency.lockutils [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.061s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1541.670684] env[62508]: INFO nova.compute.claims [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1541.756141] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776013, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.762687] env[62508]: INFO nova.compute.manager [-] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Took 1.44 seconds to deallocate network for instance. [ 1541.908375] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776012, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.607531} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.908631] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] deee2c81-4d2c-47d3-aae6-ef829d59c644/deee2c81-4d2c-47d3-aae6-ef829d59c644.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1541.909067] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1541.909613] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ccdd576b-eb3d-4717-b683-71fbed77a724 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.916024] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1541.916024] env[62508]: value = "task-1776014" [ 1541.916024] env[62508]: _type = "Task" [ 1541.916024] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.923573] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776014, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.002445] env[62508]: INFO nova.compute.manager [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] instance snapshotting [ 1542.005393] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10171f77-34b2-4388-b382-324b04e92845 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.025978] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2fa41bc-858d-4d13-85b5-3e6303e34c84 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.175535] env[62508]: DEBUG nova.compute.utils [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1542.179712] env[62508]: DEBUG nova.compute.manager [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1542.179898] env[62508]: DEBUG nova.network.neutron [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1542.255710] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776013, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.269290] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.274493] env[62508]: DEBUG nova.policy [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f3c96cc4a58a4321837c1ab8badc686a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0156fba01363470eaa9771d5f296f730', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1542.426094] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776014, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.231116} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.426393] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1542.427501] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597490eb-dfa8-4296-816e-f2077f8dda54 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.449887] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] deee2c81-4d2c-47d3-aae6-ef829d59c644/deee2c81-4d2c-47d3-aae6-ef829d59c644.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1542.450605] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-547931cb-4f50-4a37-9e6f-8d1b07a2317c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.470946] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1542.470946] env[62508]: value = "task-1776015" [ 1542.470946] env[62508]: _type = "Task" [ 1542.470946] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.479032] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776015, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.537025] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Creating Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1542.537025] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e2360d89-ebb5-4ebd-b2ec-c5eb7eb83a4e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.541640] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1542.542089] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1542.545852] env[62508]: DEBUG oslo_vmware.api [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1542.545852] env[62508]: value = "task-1776016" [ 1542.545852] env[62508]: _type = "Task" [ 1542.545852] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.556400] env[62508]: DEBUG oslo_vmware.api [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776016, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.600534] env[62508]: DEBUG nova.network.neutron [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Successfully created port: 2fdf0128-ddf8-4030-a2be-bf738efcd699 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1542.680807] env[62508]: DEBUG nova.compute.manager [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1542.764976] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776013, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.982759] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776015, 'name': ReconfigVM_Task, 'duration_secs': 0.31573} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.985384] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Reconfigured VM instance instance-0000003e to attach disk [datastore1] deee2c81-4d2c-47d3-aae6-ef829d59c644/deee2c81-4d2c-47d3-aae6-ef829d59c644.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1542.989349] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fcd192a6-c43d-477c-b0a8-882c3024ea7d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.995739] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1542.995739] env[62508]: value = "task-1776017" [ 1542.995739] env[62508]: _type = "Task" [ 1542.995739] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.003605] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776017, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.056777] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1543.056968] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1543.063716] env[62508]: DEBUG oslo_vmware.api [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776016, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.143482] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5ba214-3b19-4056-b169-09f38d95115c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.151231] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275729f9-a03d-4312-a229-4108e2d27305 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.185224] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-100d91b2-5726-401b-bf9d-80c6ffc6ec83 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.196542] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9baa970-ce3c-469b-906b-6884bc31f2b1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.213599] env[62508]: DEBUG nova.compute.provider_tree [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1543.258927] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776013, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.507381] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776017, 'name': Rename_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.561620] env[62508]: DEBUG oslo_vmware.api [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776016, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.694011] env[62508]: DEBUG nova.compute.manager [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1543.717074] env[62508]: DEBUG nova.scheduler.client.report [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1543.728541] env[62508]: DEBUG nova.virt.hardware [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1543.728779] env[62508]: DEBUG nova.virt.hardware [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1543.730645] env[62508]: DEBUG nova.virt.hardware [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1543.730645] env[62508]: DEBUG nova.virt.hardware [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1543.730645] env[62508]: DEBUG nova.virt.hardware [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1543.730645] env[62508]: DEBUG nova.virt.hardware [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1543.730645] 
env[62508]: DEBUG nova.virt.hardware [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1543.730645] env[62508]: DEBUG nova.virt.hardware [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1543.730645] env[62508]: DEBUG nova.virt.hardware [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1543.730645] env[62508]: DEBUG nova.virt.hardware [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1543.730645] env[62508]: DEBUG nova.virt.hardware [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1543.731523] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03e0488-0ea0-4f41-ad8e-a95879bc6c7c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.741579] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-981caf97-2c81-4cbb-91cf-68da42784136 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.767783] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776013, 'name': ReconfigVM_Task, 'duration_secs': 2.319146} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.768212] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Reconfigured VM instance instance-0000002b to attach disk [datastore1] e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6/e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1543.769056] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da10ec6-feb3-48b4-82ba-399d8c82563a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.788383] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36729560-021d-47d5-b083-502c222a5419 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.806601] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a75874-314e-41ac-b869-620b356befe7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.827276] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d5f21c-c3e4-4d91-a9d3-e64c264749df {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.834759] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1543.834964] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-88efbec2-1d7e-4df1-bbe7-85167594a6f5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.841815] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1543.841815] env[62508]: value = "task-1776018" [ 1543.841815] env[62508]: _type = "Task" [ 1543.841815] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.850175] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776018, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.008785] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776017, 'name': Rename_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.057500] env[62508]: DEBUG nova.compute.manager [req-06a069aa-3b8e-451f-899a-4688beb3e44a req-e48f0383-47a9-4a1b-9331-20406ccce806 service nova] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Received event network-vif-plugged-2fdf0128-ddf8-4030-a2be-bf738efcd699 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1544.057722] env[62508]: DEBUG oslo_concurrency.lockutils [req-06a069aa-3b8e-451f-899a-4688beb3e44a req-e48f0383-47a9-4a1b-9331-20406ccce806 service nova] Acquiring lock "80a9e17e-4095-498c-80c8-200bfb4f3d1f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.058012] env[62508]: DEBUG oslo_concurrency.lockutils [req-06a069aa-3b8e-451f-899a-4688beb3e44a req-e48f0383-47a9-4a1b-9331-20406ccce806 service nova] Lock "80a9e17e-4095-498c-80c8-200bfb4f3d1f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.058210] env[62508]: DEBUG oslo_concurrency.lockutils [req-06a069aa-3b8e-451f-899a-4688beb3e44a req-e48f0383-47a9-4a1b-9331-20406ccce806 service nova] Lock "80a9e17e-4095-498c-80c8-200bfb4f3d1f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.058394] env[62508]: DEBUG nova.compute.manager [req-06a069aa-3b8e-451f-899a-4688beb3e44a req-e48f0383-47a9-4a1b-9331-20406ccce806 service nova] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] No waiting events found dispatching network-vif-plugged-2fdf0128-ddf8-4030-a2be-bf738efcd699 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1544.058559] env[62508]: WARNING nova.compute.manager [req-06a069aa-3b8e-451f-899a-4688beb3e44a req-e48f0383-47a9-4a1b-9331-20406ccce806 service nova] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Received unexpected event network-vif-plugged-2fdf0128-ddf8-4030-a2be-bf738efcd699 for instance with vm_state building and task_state spawning. [ 1544.062549] env[62508]: DEBUG oslo_vmware.api [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776016, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.207531] env[62508]: DEBUG nova.network.neutron [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Successfully updated port: 2fdf0128-ddf8-4030-a2be-bf738efcd699 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1544.225111] env[62508]: DEBUG oslo_concurrency.lockutils [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.554s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.225111] env[62508]: DEBUG nova.compute.manager [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1544.227588] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.011s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.228210] env[62508]: DEBUG nova.objects.instance [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lazy-loading 'resources' on Instance uuid f456dd83-6350-46b2-b06c-41dc5c477358 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1544.355834] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776018, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.508085] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776017, 'name': Rename_Task, 'duration_secs': 1.1547} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.508356] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1544.508605] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4acaa3d5-8d4c-4165-badb-f636f2ad3fc2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.515163] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1544.515163] env[62508]: value = "task-1776019" [ 1544.515163] env[62508]: _type = "Task" [ 1544.515163] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.522843] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776019, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.561397] env[62508]: DEBUG oslo_vmware.api [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776016, 'name': CreateSnapshot_Task, 'duration_secs': 1.790209} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.562814] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Created Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1544.562814] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc98949-ef8f-4e33-a80a-0381b4858cf2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.714133] env[62508]: DEBUG oslo_concurrency.lockutils [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "refresh_cache-80a9e17e-4095-498c-80c8-200bfb4f3d1f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1544.714133] env[62508]: DEBUG oslo_concurrency.lockutils [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "refresh_cache-80a9e17e-4095-498c-80c8-200bfb4f3d1f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1544.714133] env[62508]: DEBUG nova.network.neutron [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1544.728845] env[62508]: DEBUG nova.compute.utils [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1544.730430] env[62508]: DEBUG nova.compute.manager [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1544.730697] env[62508]: DEBUG nova.network.neutron [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1544.795904] env[62508]: DEBUG nova.policy [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '596ff35abb3949e9b3d3d9b80e6eae69', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '15a9d6b8eb4e44a7a3d7fa4abe0cd5bb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1544.855029] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776018, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.026335] env[62508]: DEBUG oslo_vmware.api [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776019, 'name': PowerOnVM_Task, 'duration_secs': 0.50662} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.026622] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1545.026818] env[62508]: INFO nova.compute.manager [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Took 10.36 seconds to spawn the instance on the hypervisor. 
[ 1545.027035] env[62508]: DEBUG nova.compute.manager [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1545.028807] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc96ea1-ce2b-4615-ad71-5755c3237bde {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.084250] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Creating linked-clone VM from snapshot {{(pid=62508) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1545.086367] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-aa0827ea-9613-4c22-8884-961fc7c99783 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.094373] env[62508]: DEBUG oslo_vmware.api [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1545.094373] env[62508]: value = "task-1776020" [ 1545.094373] env[62508]: _type = "Task" [ 1545.094373] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.110725] env[62508]: DEBUG oslo_vmware.api [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776020, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.112095] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "refresh_cache-de69dbf0-86f1-4b05-a9db-8b9afaabe49c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1545.112181] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquired lock "refresh_cache-de69dbf0-86f1-4b05-a9db-8b9afaabe49c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.112329] env[62508]: DEBUG nova.network.neutron [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Forcefully refreshing network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1545.134713] env[62508]: DEBUG nova.network.neutron [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Successfully created port: 8407dcaf-0ebb-4d5f-ab85-80cb879b38bb {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1545.156782] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f408a2ba-cdfd-4958-b3da-5777eeaf80cf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.168139] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebcc82d4-347a-4399-bc01-c9c501790484 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.202798] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bb11f1f-85b7-4e4a-a73e-9d10353de32d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.211603] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d6cb67-52bf-4d5a-8795-c6c91957561c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.229766] env[62508]: DEBUG nova.compute.provider_tree [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1545.238164] env[62508]: DEBUG nova.compute.manager [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1545.260367] env[62508]: DEBUG nova.network.neutron [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1545.356471] env[62508]: DEBUG oslo_vmware.api [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776018, 'name': PowerOnVM_Task, 'duration_secs': 1.098022} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.356997] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1545.379936] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Acquiring lock "29223197-9a79-45cc-baa6-3deb731ec08e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1545.380308] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Lock "29223197-9a79-45cc-baa6-3deb731ec08e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.549552] env[62508]: DEBUG nova.network.neutron [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Updating instance_info_cache with network_info: [{"id": "2fdf0128-ddf8-4030-a2be-bf738efcd699", "address": "fa:16:3e:70:a8:a5", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2fdf0128-dd", "ovs_interfaceid": "2fdf0128-ddf8-4030-a2be-bf738efcd699", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1545.553670] env[62508]: INFO nova.compute.manager [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 
tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Took 29.89 seconds to build instance. [ 1545.606294] env[62508]: DEBUG oslo_vmware.api [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776020, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.732404] env[62508]: DEBUG nova.scheduler.client.report [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1545.884768] env[62508]: DEBUG nova.compute.manager [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1546.054390] env[62508]: DEBUG oslo_concurrency.lockutils [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "refresh_cache-80a9e17e-4095-498c-80c8-200bfb4f3d1f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.058013] env[62508]: DEBUG nova.compute.manager [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Instance network_info: |[{"id": "2fdf0128-ddf8-4030-a2be-bf738efcd699", "address": "fa:16:3e:70:a8:a5", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2fdf0128-dd", "ovs_interfaceid": "2fdf0128-ddf8-4030-a2be-bf738efcd699", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 1546.058013] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a9d3b17b-e3f8-404a-942d-e189f49fc9d9 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "deee2c81-4d2c-47d3-aae6-ef829d59c644" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.396s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1546.058013] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:a8:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2fdf0128-ddf8-4030-a2be-bf738efcd699', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1546.065281] env[62508]: DEBUG oslo.service.loopingcall [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1546.065793] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1546.066517] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea4ea462-6bda-4039-9592-f7f83f7e1c2c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.086764] env[62508]: DEBUG nova.compute.manager [req-944a3b9f-98ba-49ad-baed-662b4d72d2cf req-8b62528b-d68d-477f-987e-6fdef74fcdda service nova] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Received event network-changed-2fdf0128-ddf8-4030-a2be-bf738efcd699 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1546.087273] env[62508]: DEBUG nova.compute.manager [req-944a3b9f-98ba-49ad-baed-662b4d72d2cf req-8b62528b-d68d-477f-987e-6fdef74fcdda service nova] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Refreshing instance network info cache due to event network-changed-2fdf0128-ddf8-4030-a2be-bf738efcd699. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1546.088070] env[62508]: DEBUG oslo_concurrency.lockutils [req-944a3b9f-98ba-49ad-baed-662b4d72d2cf req-8b62528b-d68d-477f-987e-6fdef74fcdda service nova] Acquiring lock "refresh_cache-80a9e17e-4095-498c-80c8-200bfb4f3d1f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.088070] env[62508]: DEBUG oslo_concurrency.lockutils [req-944a3b9f-98ba-49ad-baed-662b4d72d2cf req-8b62528b-d68d-477f-987e-6fdef74fcdda service nova] Acquired lock "refresh_cache-80a9e17e-4095-498c-80c8-200bfb4f3d1f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.088070] env[62508]: DEBUG nova.network.neutron [req-944a3b9f-98ba-49ad-baed-662b4d72d2cf req-8b62528b-d68d-477f-987e-6fdef74fcdda service nova] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Refreshing network info cache for port 2fdf0128-ddf8-4030-a2be-bf738efcd699 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1546.090852] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1546.090852] env[62508]: value = "task-1776021" [ 1546.090852] env[62508]: _type = "Task" [ 1546.090852] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.104472] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776021, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.109876] env[62508]: DEBUG oslo_vmware.api [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776020, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.240076] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.010s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1546.241567] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.024s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1546.242046] env[62508]: DEBUG nova.objects.instance [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lazy-loading 'resources' on Instance uuid 6ae078f6-3b96-4b49-b282-cae74d742c97 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1546.250544] env[62508]: DEBUG nova.compute.manager [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1546.269877] env[62508]: INFO nova.scheduler.client.report [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleted allocations for instance f456dd83-6350-46b2-b06c-41dc5c477358 [ 1546.286366] env[62508]: DEBUG nova.virt.hardware [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1546.287015] env[62508]: DEBUG nova.virt.hardware [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1546.287015] env[62508]: DEBUG nova.virt.hardware [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1546.287015] env[62508]: DEBUG nova.virt.hardware [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1546.287948] env[62508]: DEBUG nova.virt.hardware [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1546.288890] env[62508]: DEBUG nova.virt.hardware [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1546.289180] env[62508]: DEBUG nova.virt.hardware [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1546.289371] env[62508]: DEBUG nova.virt.hardware [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Build topologies 
for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1546.289558] env[62508]: DEBUG nova.virt.hardware [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1546.289766] env[62508]: DEBUG nova.virt.hardware [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1546.289964] env[62508]: DEBUG nova.virt.hardware [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1546.291173] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5bac487-31d9-4556-85b1-439f11a8c365 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.301348] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10551824-74a6-420f-ac61-33079194d120 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.367678] env[62508]: INFO nova.compute.manager [None req-a3a825cd-0f4d-4502-acd9-cfc484a5818e tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Updating instance to original state: 'active' [ 1546.373245] env[62508]: DEBUG nova.compute.manager [req-494d9758-9195-41c8-8125-be984ded00f7 req-d0354b9d-4616-4708-8dbd-2d1d0125842f service nova] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Received event network-changed-69b14f39-9f95-4e4c-a3dd-437cf82d8fa0 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1546.373639] env[62508]: DEBUG nova.compute.manager [req-494d9758-9195-41c8-8125-be984ded00f7 req-d0354b9d-4616-4708-8dbd-2d1d0125842f service nova] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Refreshing instance network info cache due to event network-changed-69b14f39-9f95-4e4c-a3dd-437cf82d8fa0. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1546.374250] env[62508]: DEBUG oslo_concurrency.lockutils [req-494d9758-9195-41c8-8125-be984ded00f7 req-d0354b9d-4616-4708-8dbd-2d1d0125842f service nova] Acquiring lock "refresh_cache-deee2c81-4d2c-47d3-aae6-ef829d59c644" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.374370] env[62508]: DEBUG oslo_concurrency.lockutils [req-494d9758-9195-41c8-8125-be984ded00f7 req-d0354b9d-4616-4708-8dbd-2d1d0125842f service nova] Acquired lock "refresh_cache-deee2c81-4d2c-47d3-aae6-ef829d59c644" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.374651] env[62508]: DEBUG nova.network.neutron [req-494d9758-9195-41c8-8125-be984ded00f7 req-d0354b9d-4616-4708-8dbd-2d1d0125842f service nova] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Refreshing network info cache for port 69b14f39-9f95-4e4c-a3dd-437cf82d8fa0 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1546.412271] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1546.417668] env[62508]: DEBUG nova.network.neutron [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Updating instance_info_cache with network_info: [{"id": "d8028a3e-f50d-41fa-b065-a2babc831eec", "address": "fa:16:3e:d0:df:77", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8028a3e-f5", "ovs_interfaceid": "d8028a3e-f50d-41fa-b065-a2babc831eec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.607438] env[62508]: DEBUG oslo_vmware.api [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776020, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.610731] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776021, 'name': CreateVM_Task, 'duration_secs': 0.383769} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.610887] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1546.611564] env[62508]: DEBUG oslo_concurrency.lockutils [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.611725] env[62508]: DEBUG oslo_concurrency.lockutils [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.612054] env[62508]: DEBUG oslo_concurrency.lockutils [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1546.612312] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6d6b494-d10e-4dd0-88e0-59f432a45f0e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.617349] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1546.617349] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ba43d9-076a-cbf8-099c-42bd35e2d0fd" [ 1546.617349] env[62508]: _type = "Task" [ 1546.617349] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.626524] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ba43d9-076a-cbf8-099c-42bd35e2d0fd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.784140] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3a3b5749-a943-4472-aa40-d4315c2a9478 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "f456dd83-6350-46b2-b06c-41dc5c477358" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.494s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1546.921689] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Releasing lock "refresh_cache-de69dbf0-86f1-4b05-a9db-8b9afaabe49c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.922119] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Updated the network info_cache for instance {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1546.922775] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1546.922984] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1546.923198] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1546.923385] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1546.923589] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1546.923762] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1546.923905] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1546.924177] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1547.067149] env[62508]: DEBUG nova.network.neutron [req-944a3b9f-98ba-49ad-baed-662b4d72d2cf req-8b62528b-d68d-477f-987e-6fdef74fcdda service nova] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Updated VIF entry in instance network info cache for port 2fdf0128-ddf8-4030-a2be-bf738efcd699. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1547.067149] env[62508]: DEBUG nova.network.neutron [req-944a3b9f-98ba-49ad-baed-662b4d72d2cf req-8b62528b-d68d-477f-987e-6fdef74fcdda service nova] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Updating instance_info_cache with network_info: [{"id": "2fdf0128-ddf8-4030-a2be-bf738efcd699", "address": "fa:16:3e:70:a8:a5", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2fdf0128-dd", "ovs_interfaceid": "2fdf0128-ddf8-4030-a2be-bf738efcd699", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1547.115901] env[62508]: DEBUG oslo_vmware.api [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776020, 'name': CloneVM_Task, 'duration_secs': 1.812481} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.118901] env[62508]: INFO nova.virt.vmwareapi.vmops [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Created linked-clone VM from snapshot [ 1547.120264] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b39018f-774f-4c86-bb7c-43f349b43d97 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.132249] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ba43d9-076a-cbf8-099c-42bd35e2d0fd, 'name': SearchDatastore_Task, 'duration_secs': 0.015522} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.137780] env[62508]: DEBUG oslo_concurrency.lockutils [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.137780] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1547.137780] env[62508]: DEBUG oslo_concurrency.lockutils [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1547.137780] env[62508]: DEBUG oslo_concurrency.lockutils [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.137780] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1547.137780] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Uploading image 017ea814-a0e1-43a9-ac1c-e0d47472cffd 
{{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1547.142582] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6da0d5a-f740-4654-9cf8-1126d80d98f7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.148809] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Destroying the VM {{(pid=62508) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1547.149100] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5bc091fa-41df-43d9-a142-cc3140620f1f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.153065] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1547.153065] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1547.154134] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bf2ee08-9322-4efe-8e90-1d2637bd40eb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.160218] env[62508]: DEBUG oslo_vmware.api [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1547.160218] env[62508]: value = "task-1776022" [ 1547.160218] env[62508]: _type = "Task" [ 1547.160218] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.167535] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1547.167535] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b63e88-c2e6-057e-a43c-40cc3fb667f1" [ 1547.167535] env[62508]: _type = "Task" [ 1547.167535] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.170553] env[62508]: DEBUG nova.network.neutron [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Successfully updated port: 8407dcaf-0ebb-4d5f-ab85-80cb879b38bb {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1547.176228] env[62508]: DEBUG oslo_vmware.api [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776022, 'name': Destroy_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.182012] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b63e88-c2e6-057e-a43c-40cc3fb667f1, 'name': SearchDatastore_Task, 'duration_secs': 0.012705} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.187092] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0224abf8-78a6-40bf-8e02-96b8e4dd97c2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.194448] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1547.194448] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520979ff-435b-1785-1bf4-fa24910554ee" [ 1547.194448] env[62508]: _type = "Task" [ 1547.194448] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.207275] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520979ff-435b-1785-1bf4-fa24910554ee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.225878] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ac55322-f26c-477c-80d8-0189950fd358 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.233626] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc8b658-9b70-4a4d-a18f-ed367519f5d0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.266433] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e42537-289a-4c8d-8e6f-a72144095d7c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.273989] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-421f79cc-3375-4703-bdd7-9c9e89793699 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.287884] env[62508]: DEBUG nova.compute.provider_tree [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1547.417804] env[62508]: DEBUG nova.network.neutron [req-494d9758-9195-41c8-8125-be984ded00f7 req-d0354b9d-4616-4708-8dbd-2d1d0125842f service nova] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Updated VIF entry in instance network info cache for port 69b14f39-9f95-4e4c-a3dd-437cf82d8fa0. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1547.418213] env[62508]: DEBUG nova.network.neutron [req-494d9758-9195-41c8-8125-be984ded00f7 req-d0354b9d-4616-4708-8dbd-2d1d0125842f service nova] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Updating instance_info_cache with network_info: [{"id": "69b14f39-9f95-4e4c-a3dd-437cf82d8fa0", "address": "fa:16:3e:58:65:62", "network": {"id": "f7d53ec2-0e84-4e27-87d1-6f2a48716aa5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-801595391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08a2bcaca6e4406a8ccda7b934995f15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69b14f39-9f", "ovs_interfaceid": "69b14f39-9f95-4e4c-a3dd-437cf82d8fa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1547.429574] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.453080] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.453336] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.453674] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.454034] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 
tempest-MigrationsAdminTest-1752776070-project-member] Lock "e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.454290] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.456508] env[62508]: INFO nova.compute.manager [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Terminating instance [ 1547.458350] env[62508]: DEBUG nova.compute.manager [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1547.458562] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1547.459424] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a9788b5-ab58-4b3f-9485-4c88a1c65027 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.467775] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1547.468123] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3bda02ee-cd62-4c23-bc46-ae6217e6cdd5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.474155] env[62508]: DEBUG oslo_vmware.api [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1547.474155] env[62508]: value = "task-1776023" [ 1547.474155] env[62508]: _type = "Task" [ 1547.474155] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.482964] env[62508]: DEBUG oslo_vmware.api [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776023, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.569067] env[62508]: DEBUG oslo_concurrency.lockutils [req-944a3b9f-98ba-49ad-baed-662b4d72d2cf req-8b62528b-d68d-477f-987e-6fdef74fcdda service nova] Releasing lock "refresh_cache-80a9e17e-4095-498c-80c8-200bfb4f3d1f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.674794] env[62508]: DEBUG oslo_vmware.api [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776022, 'name': Destroy_Task, 'duration_secs': 0.341312} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.675297] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Destroyed the VM [ 1547.675703] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Deleting Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1547.676512] env[62508]: DEBUG oslo_concurrency.lockutils [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "refresh_cache-68d64a06-f752-459c-a152-157893e79bfd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1547.676697] env[62508]: DEBUG oslo_concurrency.lockutils [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquired lock "refresh_cache-68d64a06-f752-459c-a152-157893e79bfd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.676926] env[62508]: DEBUG nova.network.neutron [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1547.678515] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-dff70fcb-3022-44fe-929c-48d1cfa7be93 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.689035] env[62508]: DEBUG oslo_vmware.api [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1547.689035] env[62508]: value = "task-1776024" [ 1547.689035] env[62508]: _type = "Task" [ 1547.689035] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.709713] env[62508]: DEBUG oslo_vmware.api [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776024, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.716025] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520979ff-435b-1785-1bf4-fa24910554ee, 'name': SearchDatastore_Task, 'duration_secs': 0.013212} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.716476] env[62508]: DEBUG oslo_concurrency.lockutils [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.716651] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 80a9e17e-4095-498c-80c8-200bfb4f3d1f/80a9e17e-4095-498c-80c8-200bfb4f3d1f.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1547.716994] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2c69dfaa-3382-4363-b000-1e4b8fbb08e4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.724143] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1547.724143] env[62508]: value = "task-1776025" [ 1547.724143] env[62508]: _type = "Task" [ 1547.724143] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.732631] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776025, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.792041] env[62508]: DEBUG nova.scheduler.client.report [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1547.921819] env[62508]: DEBUG oslo_concurrency.lockutils [req-494d9758-9195-41c8-8125-be984ded00f7 req-d0354b9d-4616-4708-8dbd-2d1d0125842f service nova] Releasing lock "refresh_cache-deee2c81-4d2c-47d3-aae6-ef829d59c644" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.987951] env[62508]: DEBUG oslo_vmware.api [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776023, 'name': PowerOffVM_Task, 'duration_secs': 0.267324} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.988328] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1547.988533] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1547.988816] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5041bf85-9ffb-4789-af92-cb823950a1d6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.094900] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1548.095166] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1548.095381] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Deleting the datastore file [datastore1] 
e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1548.095744] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f401d404-1fed-41cb-a616-db121162a59b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.102295] env[62508]: DEBUG oslo_vmware.api [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1548.102295] env[62508]: value = "task-1776027" [ 1548.102295] env[62508]: _type = "Task" [ 1548.102295] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.113219] env[62508]: DEBUG oslo_vmware.api [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776027, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.115690] env[62508]: DEBUG nova.compute.manager [req-8132167c-268a-4b99-8500-e018cf490016 req-2a8f6dc4-33d0-4867-b375-c7e61d0c4cf9 service nova] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Received event network-vif-plugged-8407dcaf-0ebb-4d5f-ab85-80cb879b38bb {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1548.115690] env[62508]: DEBUG oslo_concurrency.lockutils [req-8132167c-268a-4b99-8500-e018cf490016 req-2a8f6dc4-33d0-4867-b375-c7e61d0c4cf9 service nova] Acquiring lock "68d64a06-f752-459c-a152-157893e79bfd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1548.115938] env[62508]: DEBUG oslo_concurrency.lockutils [req-8132167c-268a-4b99-8500-e018cf490016 req-2a8f6dc4-33d0-4867-b375-c7e61d0c4cf9 service nova] Lock "68d64a06-f752-459c-a152-157893e79bfd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1548.116102] env[62508]: DEBUG oslo_concurrency.lockutils [req-8132167c-268a-4b99-8500-e018cf490016 req-2a8f6dc4-33d0-4867-b375-c7e61d0c4cf9 service nova] Lock "68d64a06-f752-459c-a152-157893e79bfd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.116272] env[62508]: DEBUG nova.compute.manager [req-8132167c-268a-4b99-8500-e018cf490016 req-2a8f6dc4-33d0-4867-b375-c7e61d0c4cf9 service nova] [instance: 68d64a06-f752-459c-a152-157893e79bfd] No waiting events found dispatching network-vif-plugged-8407dcaf-0ebb-4d5f-ab85-80cb879b38bb {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1548.116440] env[62508]: WARNING nova.compute.manager [req-8132167c-268a-4b99-8500-e018cf490016 req-2a8f6dc4-33d0-4867-b375-c7e61d0c4cf9 service nova] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Received unexpected event network-vif-plugged-8407dcaf-0ebb-4d5f-ab85-80cb879b38bb for instance with vm_state building and task_state spawning. 
[ 1548.116615] env[62508]: DEBUG nova.compute.manager [req-8132167c-268a-4b99-8500-e018cf490016 req-2a8f6dc4-33d0-4867-b375-c7e61d0c4cf9 service nova] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Received event network-changed-8407dcaf-0ebb-4d5f-ab85-80cb879b38bb {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1548.116778] env[62508]: DEBUG nova.compute.manager [req-8132167c-268a-4b99-8500-e018cf490016 req-2a8f6dc4-33d0-4867-b375-c7e61d0c4cf9 service nova] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Refreshing instance network info cache due to event network-changed-8407dcaf-0ebb-4d5f-ab85-80cb879b38bb. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1548.116948] env[62508]: DEBUG oslo_concurrency.lockutils [req-8132167c-268a-4b99-8500-e018cf490016 req-2a8f6dc4-33d0-4867-b375-c7e61d0c4cf9 service nova] Acquiring lock "refresh_cache-68d64a06-f752-459c-a152-157893e79bfd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.201247] env[62508]: DEBUG oslo_vmware.api [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776024, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.216588] env[62508]: DEBUG nova.network.neutron [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1548.237302] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776025, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.297972] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.056s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.302113] env[62508]: DEBUG oslo_concurrency.lockutils [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.623s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1548.302113] env[62508]: DEBUG nova.objects.instance [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lazy-loading 'resources' on Instance uuid e156aef5-bb56-4c17-9e7e-9419b672c9cf {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1548.327890] env[62508]: INFO nova.scheduler.client.report [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Deleted allocations for instance 6ae078f6-3b96-4b49-b282-cae74d742c97 [ 1548.422303] env[62508]: DEBUG nova.network.neutron [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Updating instance_info_cache with network_info: [{"id": "8407dcaf-0ebb-4d5f-ab85-80cb879b38bb", "address": "fa:16:3e:9b:ba:5a", "network": {"id": "ca54620c-2118-4248-ac67-90f8579e33aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-578420006-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15a9d6b8eb4e44a7a3d7fa4abe0cd5bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8407dcaf-0e", "ovs_interfaceid": "8407dcaf-0ebb-4d5f-ab85-80cb879b38bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1548.612250] env[62508]: DEBUG oslo_vmware.api [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776027, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.353699} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.612469] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1548.612663] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1548.612841] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1548.613036] env[62508]: INFO nova.compute.manager [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1548.613336] env[62508]: DEBUG oslo.service.loopingcall [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1548.613555] env[62508]: DEBUG nova.compute.manager [-] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1548.613650] env[62508]: DEBUG nova.network.neutron [-] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1548.700683] env[62508]: DEBUG oslo_vmware.api [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776024, 'name': RemoveSnapshot_Task, 'duration_secs': 0.858837} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.700950] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Deleted Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1548.734959] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776025, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.596758} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.735281] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 80a9e17e-4095-498c-80c8-200bfb4f3d1f/80a9e17e-4095-498c-80c8-200bfb4f3d1f.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1548.735564] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1548.735776] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e3fd7bc8-c86f-4cc3-be21-58c427951e66 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.743463] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1548.743463] env[62508]: value = "task-1776028" [ 1548.743463] env[62508]: _type = "Task" [ 1548.743463] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.752845] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776028, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.838598] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8bbf924a-3205-4900-b5fe-217133159de0 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "6ae078f6-3b96-4b49-b282-cae74d742c97" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.867s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.905988] env[62508]: DEBUG nova.compute.manager [req-decfa463-72a2-4834-a73a-137952eae70a req-b70a7cd8-3281-455f-9e3b-3fb323ef44eb service nova] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Received event network-vif-deleted-cafd5648-99e8-4c28-92bb-439b1d656b15 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1548.906107] env[62508]: INFO nova.compute.manager [req-decfa463-72a2-4834-a73a-137952eae70a req-b70a7cd8-3281-455f-9e3b-3fb323ef44eb service nova] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Neutron deleted interface cafd5648-99e8-4c28-92bb-439b1d656b15; detaching it from the instance and deleting it from the info cache [ 1548.906492] env[62508]: DEBUG nova.network.neutron [req-decfa463-72a2-4834-a73a-137952eae70a req-b70a7cd8-3281-455f-9e3b-3fb323ef44eb service nova] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1548.926187] env[62508]: DEBUG oslo_concurrency.lockutils [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Releasing lock "refresh_cache-68d64a06-f752-459c-a152-157893e79bfd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1548.926187] env[62508]: DEBUG nova.compute.manager [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Instance network_info: |[{"id": "8407dcaf-0ebb-4d5f-ab85-80cb879b38bb", "address": "fa:16:3e:9b:ba:5a", "network": {"id": "ca54620c-2118-4248-ac67-90f8579e33aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-578420006-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15a9d6b8eb4e44a7a3d7fa4abe0cd5bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8407dcaf-0e", "ovs_interfaceid": "8407dcaf-0ebb-4d5f-ab85-80cb879b38bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1548.926187] env[62508]: DEBUG oslo_concurrency.lockutils 
[req-8132167c-268a-4b99-8500-e018cf490016 req-2a8f6dc4-33d0-4867-b375-c7e61d0c4cf9 service nova] Acquired lock "refresh_cache-68d64a06-f752-459c-a152-157893e79bfd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.926187] env[62508]: DEBUG nova.network.neutron [req-8132167c-268a-4b99-8500-e018cf490016 req-2a8f6dc4-33d0-4867-b375-c7e61d0c4cf9 service nova] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Refreshing network info cache for port 8407dcaf-0ebb-4d5f-ab85-80cb879b38bb {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1548.927223] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:ba:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9d6abf71-e893-4dec-9a05-0fe7d6c0624e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8407dcaf-0ebb-4d5f-ab85-80cb879b38bb', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1548.935081] env[62508]: DEBUG oslo.service.loopingcall [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1548.938356] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1548.939096] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-54ad0489-2d86-429c-9635-e686a7f71f86 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.962241] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1548.962241] env[62508]: value = "task-1776029" [ 1548.962241] env[62508]: _type = "Task" [ 1548.962241] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.971215] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776029, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.162949] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3524588b-519b-43a6-89ba-62d39c574d9d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.173137] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a370b3c5-6c03-4a64-9531-dd726975a246 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.207312] env[62508]: WARNING nova.compute.manager [None req-cfb79a2a-bdda-42ab-9974-55dd7eef3129 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Image not found during snapshot: nova.exception.ImageNotFound: Image 017ea814-a0e1-43a9-ac1c-e0d47472cffd could not be found. [ 1549.209190] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b9ea3c-56fd-475a-9b22-6697749ea2e7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.217432] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aff764b-d678-4625-a9da-c4fbdbcbe8f4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.231729] env[62508]: DEBUG nova.compute.provider_tree [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1549.252476] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776028, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.300365} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.252648] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1549.253419] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e10998-c131-41bc-8a3b-796d701518cb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.277258] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 80a9e17e-4095-498c-80c8-200bfb4f3d1f/80a9e17e-4095-498c-80c8-200bfb4f3d1f.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1549.277874] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a6963b9-a11b-4655-8ae3-558d56f577ef {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.297036] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1549.297036] env[62508]: value = "task-1776030" [ 1549.297036] env[62508]: _type = "Task" [ 1549.297036] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.304914] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776030, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.374369] env[62508]: DEBUG nova.network.neutron [-] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.409713] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5399001-45f5-4ad6-9a97-763855b7e5e6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.420878] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58766db2-d4bb-42ac-b20b-342c3186eece {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.460220] env[62508]: DEBUG nova.compute.manager [req-decfa463-72a2-4834-a73a-137952eae70a req-b70a7cd8-3281-455f-9e3b-3fb323ef44eb service nova] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Detach interface failed, port_id=cafd5648-99e8-4c28-92bb-439b1d656b15, reason: Instance e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6 could not be found. 
{{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1549.473277] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776029, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.653283] env[62508]: DEBUG nova.network.neutron [req-8132167c-268a-4b99-8500-e018cf490016 req-2a8f6dc4-33d0-4867-b375-c7e61d0c4cf9 service nova] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Updated VIF entry in instance network info cache for port 8407dcaf-0ebb-4d5f-ab85-80cb879b38bb. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1549.653665] env[62508]: DEBUG nova.network.neutron [req-8132167c-268a-4b99-8500-e018cf490016 req-2a8f6dc4-33d0-4867-b375-c7e61d0c4cf9 service nova] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Updating instance_info_cache with network_info: [{"id": "8407dcaf-0ebb-4d5f-ab85-80cb879b38bb", "address": "fa:16:3e:9b:ba:5a", "network": {"id": "ca54620c-2118-4248-ac67-90f8579e33aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-578420006-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15a9d6b8eb4e44a7a3d7fa4abe0cd5bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8407dcaf-0e", "ovs_interfaceid": "8407dcaf-0ebb-4d5f-ab85-80cb879b38bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.735070] env[62508]: DEBUG nova.scheduler.client.report [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1549.807606] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776030, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.878546] env[62508]: INFO nova.compute.manager [-] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Took 1.26 seconds to deallocate network for instance. 
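The surrounding records show a network-vif-deleted event for port cafd5648-99e8-4c28-92bb-439b1d656b15, after which the instance's network info cache is rewritten as an empty list. A minimal sketch of that cache bookkeeping, assuming a plain list-of-dicts cache shape (the real logic lives in nova.compute.manager and nova.network.neutron, not in this snippet):

    def drop_vif_from_cache(network_info, deleted_port_id):
        """Return a new network_info list without the deleted port.

        `network_info` is a list of dicts shaped like the entries logged by
        update_instance_cache_with_nw_info, each carrying an "id" key.
        """
        return [vif for vif in network_info if vif.get("id") != deleted_port_id]

    # Mirrors the log: after the vif-deleted event the cache ends up as [].
    cache = [{"id": "cafd5648-99e8-4c28-92bb-439b1d656b15", "type": "ovs"}]
    cache = drop_vif_from_cache(cache, "cafd5648-99e8-4c28-92bb-439b1d656b15")
    assert cache == []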
[ 1549.974052] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776029, 'name': CreateVM_Task, 'duration_secs': 0.77514} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.974321] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1549.975337] env[62508]: DEBUG oslo_concurrency.lockutils [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.975614] env[62508]: DEBUG oslo_concurrency.lockutils [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.976082] env[62508]: DEBUG oslo_concurrency.lockutils [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1549.976831] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6579b78-882b-4cc0-a6e4-f29bdce8ee31 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.983181] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1549.983181] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52791415-1406-2341-a35b-24dbf4d851e7" [ 1549.983181] env[62508]: _type = "Task" [ 1549.983181] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.995214] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52791415-1406-2341-a35b-24dbf4d851e7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.156078] env[62508]: DEBUG oslo_concurrency.lockutils [req-8132167c-268a-4b99-8500-e018cf490016 req-2a8f6dc4-33d0-4867-b375-c7e61d0c4cf9 service nova] Releasing lock "refresh_cache-68d64a06-f752-459c-a152-157893e79bfd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1550.240259] env[62508]: DEBUG oslo_concurrency.lockutils [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.940s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.243048] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.587s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.244455] env[62508]: INFO nova.compute.claims [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1550.263534] env[62508]: INFO nova.scheduler.client.report [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Deleted allocations for instance e156aef5-bb56-4c17-9e7e-9419b672c9cf [ 1550.310230] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776030, 'name': ReconfigVM_Task, 'duration_secs': 0.578918} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.310506] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 80a9e17e-4095-498c-80c8-200bfb4f3d1f/80a9e17e-4095-498c-80c8-200bfb4f3d1f.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1550.311341] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17e19590-a593-4ee5-86ec-a0fb754ca158 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.317707] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1550.317707] env[62508]: value = "task-1776031" [ 1550.317707] env[62508]: _type = "Task" [ 1550.317707] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.325532] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776031, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.385499] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1550.494969] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52791415-1406-2341-a35b-24dbf4d851e7, 'name': SearchDatastore_Task, 'duration_secs': 0.00951} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.495269] env[62508]: DEBUG oslo_concurrency.lockutils [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1550.495499] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1550.495735] env[62508]: DEBUG oslo_concurrency.lockutils [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1550.495885] env[62508]: DEBUG oslo_concurrency.lockutils [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1550.496099] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1550.496368] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-377c34eb-ca82-4049-ab5e-4bad23321ea5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
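The records here lock the cached image vmdk path, ensure the devstack-image-cache_base directory exists, and then copy the cached disk into the instance's own directory before extending it. A rough local-filesystem analogue of that lock-then-prepare-then-copy flow, with assumed function names and a threading.Lock standing in for the lockutils/datastore locks and vCenter FileManager/VirtualDiskManager tasks actually used:

    import shutil
    import threading
    from pathlib import Path

    _cache_lock = threading.Lock()

    def provision_root_disk(cache_dir: Path, image_id: str, instance_dir: Path) -> Path:
        """Copy the cached <image_id>.vmdk into instance_dir, creating dirs as needed."""
        cached = cache_dir / f"{image_id}.vmdk"
        target = instance_dir / f"{instance_dir.name}.vmdk"
        with _cache_lock:                                 # analogue of the cache-path lock
            cache_dir.mkdir(parents=True, exist_ok=True)  # MakeDirectory in the log
            instance_dir.mkdir(parents=True, exist_ok=True)
            shutil.copyfile(cached, target)               # CopyVirtualDisk_Task analogue
        return target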
[ 1550.504322] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1550.504493] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1550.505202] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53e0d1b9-7815-49b1-9b71-8d78acf2c55a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.510279] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1550.510279] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52276712-695a-1949-61d4-935aba7024e3" [ 1550.510279] env[62508]: _type = "Task" [ 1550.510279] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.518033] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52276712-695a-1949-61d4-935aba7024e3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.524499] env[62508]: DEBUG oslo_concurrency.lockutils [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "cdb1ccaf-83b3-48f8-92da-aca2310863ac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1550.524726] env[62508]: DEBUG oslo_concurrency.lockutils [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "cdb1ccaf-83b3-48f8-92da-aca2310863ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.524929] env[62508]: DEBUG oslo_concurrency.lockutils [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "cdb1ccaf-83b3-48f8-92da-aca2310863ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1550.525154] env[62508]: DEBUG oslo_concurrency.lockutils [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "cdb1ccaf-83b3-48f8-92da-aca2310863ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.525331] env[62508]: DEBUG oslo_concurrency.lockutils [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "cdb1ccaf-83b3-48f8-92da-aca2310863ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.527626] env[62508]: INFO nova.compute.manager [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Terminating instance [ 1550.529155] env[62508]: DEBUG nova.compute.manager [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1550.529351] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1550.530114] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a65c367-35fd-4d18-8159-01196eb9f73b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.537011] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1550.537258] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1639c8f7-a172-4308-af2d-32ffbb94d3e6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.543741] env[62508]: DEBUG oslo_vmware.api [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1550.543741] env[62508]: value = "task-1776032" [ 1550.543741] env[62508]: _type = "Task" [ 1550.543741] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.551572] env[62508]: DEBUG oslo_vmware.api [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776032, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.772394] env[62508]: DEBUG oslo_concurrency.lockutils [None req-97264288-9a62-43f4-a131-9c45d6534eda tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "e156aef5-bb56-4c17-9e7e-9419b672c9cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.402s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.828457] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776031, 'name': Rename_Task, 'duration_secs': 0.15418} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.828769] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1550.829495] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-42169b53-f00f-4c70-a5e4-e15e272f45d3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.836429] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1550.836429] env[62508]: value = "task-1776033" [ 1550.836429] env[62508]: _type = "Task" [ 1550.836429] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.844512] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776033, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.020681] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52276712-695a-1949-61d4-935aba7024e3, 'name': SearchDatastore_Task, 'duration_secs': 0.008477} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.021446] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa8fbf9c-757d-42fe-b08e-a348c86b9973 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.027147] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1551.027147] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5239dfdc-b103-12b4-21c3-9d374f69e8e1" [ 1551.027147] env[62508]: _type = "Task" [ 1551.027147] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.035654] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5239dfdc-b103-12b4-21c3-9d374f69e8e1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.053695] env[62508]: DEBUG oslo_vmware.api [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776032, 'name': PowerOffVM_Task, 'duration_secs': 0.341712} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.053932] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1551.054152] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1551.054437] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-165826e6-bfbe-49e0-b1b1-9e1849bff5d7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.137584] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1551.137781] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1551.137939] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Deleting the datastore file [datastore1] cdb1ccaf-83b3-48f8-92da-aca2310863ac {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1551.138237] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b4eeaf7-2c94-437c-9c9e-6c520b6c3575 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.144603] env[62508]: DEBUG oslo_vmware.api [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1551.144603] env[62508]: value = "task-1776035" [ 1551.144603] env[62508]: _type = "Task" [ 1551.144603] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.152940] env[62508]: DEBUG oslo_vmware.api [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776035, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.347752] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776033, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.539628] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5239dfdc-b103-12b4-21c3-9d374f69e8e1, 'name': SearchDatastore_Task, 'duration_secs': 0.01111} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.542690] env[62508]: DEBUG oslo_concurrency.lockutils [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1551.543062] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 68d64a06-f752-459c-a152-157893e79bfd/68d64a06-f752-459c-a152-157893e79bfd.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1551.543596] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-15cadbd7-a7ad-420f-914f-cbfc6a157784 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.551111] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1551.551111] env[62508]: value = "task-1776036" [ 1551.551111] env[62508]: _type = "Task" [ 1551.551111] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.565507] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776036, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.654072] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163bd5d7-65c4-4556-bea3-65aa52c834b1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.663158] env[62508]: DEBUG oslo_vmware.api [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776035, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.3962} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.664150] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1551.664459] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1551.664750] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1551.665044] env[62508]: INFO nova.compute.manager [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1551.665478] env[62508]: DEBUG oslo.service.loopingcall [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1551.668450] env[62508]: DEBUG nova.compute.manager [-] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1551.668605] env[62508]: DEBUG nova.network.neutron [-] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1551.672526] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f29ddec-b7b9-425d-a077-289cdfce0d7b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.723546] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5786de23-f4cc-467e-a0fa-2746e6d13f83 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.737431] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17eafd77-21f1-43ea-9655-7c24c4148ca4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.759060] env[62508]: DEBUG nova.compute.provider_tree [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1551.848254] env[62508]: DEBUG oslo_vmware.api [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776033, 'name': PowerOnVM_Task, 'duration_secs': 0.878575} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.849062] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1551.849062] env[62508]: INFO nova.compute.manager [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Took 8.15 seconds to spawn the instance on the hypervisor. 
[ 1551.849062] env[62508]: DEBUG nova.compute.manager [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1551.850607] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dad03f0-7b16-46ad-b774-ab2da3ccf2f5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.021228] env[62508]: DEBUG nova.compute.manager [req-c6b68a99-68a3-42f7-80ed-82f5544f9446 req-761551ed-7be9-4c49-8893-80cbe4d7dcbf service nova] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Received event network-vif-deleted-93b3f95e-7a03-4164-a2b0-8b0c647d4377 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1552.021344] env[62508]: INFO nova.compute.manager [req-c6b68a99-68a3-42f7-80ed-82f5544f9446 req-761551ed-7be9-4c49-8893-80cbe4d7dcbf service nova] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Neutron deleted interface 93b3f95e-7a03-4164-a2b0-8b0c647d4377; detaching it from the instance and deleting it from the info cache [ 1552.021522] env[62508]: DEBUG nova.network.neutron [req-c6b68a99-68a3-42f7-80ed-82f5544f9446 req-761551ed-7be9-4c49-8893-80cbe4d7dcbf service nova] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1552.061240] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776036, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.265514] env[62508]: DEBUG nova.scheduler.client.report [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1552.368031] env[62508]: INFO nova.compute.manager [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Took 19.11 seconds to build instance. 
[ 1552.495783] env[62508]: DEBUG nova.network.neutron [-] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1552.526547] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-86276bb0-b3ec-4958-8e97-7d1a9c5ea4b1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.537190] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d158f998-baef-4d8f-8b2b-858dcc900bf7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.571625] env[62508]: DEBUG nova.compute.manager [req-c6b68a99-68a3-42f7-80ed-82f5544f9446 req-761551ed-7be9-4c49-8893-80cbe4d7dcbf service nova] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Detach interface failed, port_id=93b3f95e-7a03-4164-a2b0-8b0c647d4377, reason: Instance cdb1ccaf-83b3-48f8-92da-aca2310863ac could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1552.575792] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776036, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.662563} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.576076] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 68d64a06-f752-459c-a152-157893e79bfd/68d64a06-f752-459c-a152-157893e79bfd.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1552.576232] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1552.576496] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-90f068a9-d7e3-43f1-be2e-798e5105d23c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.582689] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1552.582689] env[62508]: value = "task-1776037" [ 1552.582689] env[62508]: _type = "Task" [ 1552.582689] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.590729] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776037, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.770638] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.528s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1552.772065] env[62508]: DEBUG nova.compute.manager [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1552.774045] env[62508]: DEBUG oslo_concurrency.lockutils [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 14.878s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1552.774225] env[62508]: DEBUG nova.objects.instance [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62508) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1552.869766] env[62508]: DEBUG oslo_concurrency.lockutils [None req-870e3197-e51e-461f-8119-d774ac9184ba tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "80a9e17e-4095-498c-80c8-200bfb4f3d1f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.628s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1553.000667] env[62508]: INFO nova.compute.manager [-] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Took 1.33 seconds to deallocate network for instance. [ 1553.096542] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776037, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093479} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.096692] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1553.097446] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53e6830-151c-4059-b499-57b0267ff427 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.123782] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 68d64a06-f752-459c-a152-157893e79bfd/68d64a06-f752-459c-a152-157893e79bfd.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1553.123782] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-425ffa9f-bd07-48e6-b40c-51a7627af4ea {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.141958] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1553.141958] env[62508]: value = "task-1776038" [ 1553.141958] env[62508]: _type = "Task" [ 1553.141958] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.150202] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776038, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.279452] env[62508]: DEBUG nova.compute.utils [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1553.285802] env[62508]: DEBUG nova.compute.manager [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1553.286114] env[62508]: DEBUG nova.network.neutron [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1553.353344] env[62508]: DEBUG nova.policy [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2561183ef9c54615988c33906fc5f84e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce0dd059301e41abb3758625d38e435e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1553.509576] env[62508]: DEBUG oslo_concurrency.lockutils [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1553.653323] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.668333] env[62508]: DEBUG nova.network.neutron [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Successfully created port: c969ec53-5930-48a3-bad6-aaa89e2519c6 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1553.790078] env[62508]: DEBUG nova.compute.manager [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1553.794402] env[62508]: DEBUG oslo_concurrency.lockutils [None req-daf81f2c-47a2-4b02-b2a0-cf37fe5fc1ba tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1553.795510] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.103s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1553.795738] env[62508]: DEBUG nova.objects.instance [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lazy-loading 'resources' on Instance uuid 45de6dd5-97f3-4eea-a171-0254a2b37a41 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1554.155404] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.219152] env[62508]: DEBUG nova.compute.manager [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Stashing vm_state: active {{(pid=62508) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1554.626759] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078f6742-3bd4-426e-9c91-000625878cfb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.636227] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac7b2921-d5a6-4d80-ac0c-0fea0ff475b3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.669134] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3652ff-87fa-42a9-b7c0-ecfeaf4932f4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.676844] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776038, 'name': ReconfigVM_Task, 'duration_secs': 1.105344} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.678870] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 68d64a06-f752-459c-a152-157893e79bfd/68d64a06-f752-459c-a152-157893e79bfd.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1554.679525] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8311b7b7-ab88-4ec6-9295-bb7c118c0955 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.681958] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e52b03-9f4b-4427-8f9b-04a42fee2014 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.695696] env[62508]: DEBUG nova.compute.provider_tree [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1554.697984] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1554.697984] env[62508]: value = "task-1776039" [ 1554.697984] env[62508]: _type = "Task" [ 1554.697984] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.705905] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776039, 'name': Rename_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.738322] env[62508]: DEBUG oslo_concurrency.lockutils [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.806378] env[62508]: DEBUG nova.compute.manager [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1554.831100] env[62508]: DEBUG nova.virt.hardware [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1554.831248] env[62508]: DEBUG nova.virt.hardware [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1554.832042] env[62508]: DEBUG nova.virt.hardware [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1554.832042] env[62508]: DEBUG nova.virt.hardware [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1554.832042] env[62508]: DEBUG nova.virt.hardware [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1554.832042] env[62508]: DEBUG nova.virt.hardware [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1554.832213] env[62508]: DEBUG nova.virt.hardware [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1554.832253] env[62508]: DEBUG nova.virt.hardware [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1554.832836] env[62508]: DEBUG nova.virt.hardware [None 
req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1554.832836] env[62508]: DEBUG nova.virt.hardware [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1554.832836] env[62508]: DEBUG nova.virt.hardware [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1554.833727] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d605e0e7-98d5-4e2d-945a-e5cb5b947a1d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.842148] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-506d979b-5e6d-4d1f-b2d5-50a22c039814 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.199866] env[62508]: DEBUG nova.scheduler.client.report [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1555.212383] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776039, 'name': Rename_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.216430] env[62508]: DEBUG nova.compute.manager [req-e507059c-5dd4-4a9e-b37a-853a00930351 req-eed1d8c0-44f8-46df-8c16-232148c5ad34 service nova] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Received event network-vif-plugged-c969ec53-5930-48a3-bad6-aaa89e2519c6 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1555.216649] env[62508]: DEBUG oslo_concurrency.lockutils [req-e507059c-5dd4-4a9e-b37a-853a00930351 req-eed1d8c0-44f8-46df-8c16-232148c5ad34 service nova] Acquiring lock "4d24bacc-48c4-4649-bb29-fcae2cf77782-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.218167] env[62508]: DEBUG oslo_concurrency.lockutils [req-e507059c-5dd4-4a9e-b37a-853a00930351 req-eed1d8c0-44f8-46df-8c16-232148c5ad34 service nova] Lock "4d24bacc-48c4-4649-bb29-fcae2cf77782-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.218167] env[62508]: DEBUG oslo_concurrency.lockutils [req-e507059c-5dd4-4a9e-b37a-853a00930351 req-eed1d8c0-44f8-46df-8c16-232148c5ad34 service nova] Lock "4d24bacc-48c4-4649-bb29-fcae2cf77782-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.218167] env[62508]: DEBUG nova.compute.manager [req-e507059c-5dd4-4a9e-b37a-853a00930351 req-eed1d8c0-44f8-46df-8c16-232148c5ad34 service nova] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] No waiting events found dispatching network-vif-plugged-c969ec53-5930-48a3-bad6-aaa89e2519c6 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1555.218167] env[62508]: WARNING nova.compute.manager [req-e507059c-5dd4-4a9e-b37a-853a00930351 req-eed1d8c0-44f8-46df-8c16-232148c5ad34 service nova] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Received unexpected event network-vif-plugged-c969ec53-5930-48a3-bad6-aaa89e2519c6 for instance with vm_state building and task_state spawning. 
[ 1555.348227] env[62508]: DEBUG nova.network.neutron [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Successfully updated port: c969ec53-5930-48a3-bad6-aaa89e2519c6 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1555.707685] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.912s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.709774] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.457s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.711509] env[62508]: INFO nova.compute.claims [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1555.719476] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776039, 'name': Rename_Task, 'duration_secs': 0.932256} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.719725] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1555.720026] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d84f22e-6fa5-428e-8212-31b85d8761ab {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.725909] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1555.725909] env[62508]: value = "task-1776040" [ 1555.725909] env[62508]: _type = "Task" [ 1555.725909] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.734570] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776040, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.735533] env[62508]: INFO nova.scheduler.client.report [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Deleted allocations for instance 45de6dd5-97f3-4eea-a171-0254a2b37a41 [ 1555.849664] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "refresh_cache-4d24bacc-48c4-4649-bb29-fcae2cf77782" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1555.849821] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "refresh_cache-4d24bacc-48c4-4649-bb29-fcae2cf77782" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1555.849973] env[62508]: DEBUG nova.network.neutron [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1556.236878] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776040, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.244158] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ab86fc55-4438-40fd-8b0d-9c2639f7322d tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lock "45de6dd5-97f3-4eea-a171-0254a2b37a41" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.619s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1556.396816] env[62508]: DEBUG nova.network.neutron [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1556.601760] env[62508]: DEBUG nova.network.neutron [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Updating instance_info_cache with network_info: [{"id": "c969ec53-5930-48a3-bad6-aaa89e2519c6", "address": "fa:16:3e:7a:87:4f", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc969ec53-59", "ovs_interfaceid": "c969ec53-5930-48a3-bad6-aaa89e2519c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1556.737807] env[62508]: DEBUG oslo_vmware.api [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776040, 'name': PowerOnVM_Task, 'duration_secs': 0.712574} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.738134] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1556.738340] env[62508]: INFO nova.compute.manager [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Took 10.49 seconds to spawn the instance on the hypervisor. 
[ 1556.738522] env[62508]: DEBUG nova.compute.manager [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1556.739314] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914029da-ab18-474c-a904-5122e2a2ffe0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.830376] env[62508]: DEBUG oslo_concurrency.lockutils [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquiring lock "06baedda-2926-4ec8-a4f6-d62713f48a26" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.830643] env[62508]: DEBUG oslo_concurrency.lockutils [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lock "06baedda-2926-4ec8-a4f6-d62713f48a26" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1556.830855] env[62508]: DEBUG oslo_concurrency.lockutils [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquiring lock "06baedda-2926-4ec8-a4f6-d62713f48a26-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.831044] env[62508]: DEBUG oslo_concurrency.lockutils [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lock "06baedda-2926-4ec8-a4f6-d62713f48a26-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1556.831238] env[62508]: DEBUG oslo_concurrency.lockutils [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lock "06baedda-2926-4ec8-a4f6-d62713f48a26-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1556.833403] env[62508]: INFO nova.compute.manager [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Terminating instance [ 1556.835114] env[62508]: DEBUG nova.compute.manager [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1556.835315] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1556.836163] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10661ba7-866b-47cd-8d8c-39ac692751ac {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.844769] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1556.845012] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b25bde5f-c473-4f24-bbd1-aa46da83a059 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.853191] env[62508]: DEBUG oslo_vmware.api [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1556.853191] env[62508]: value = "task-1776041" [ 1556.853191] env[62508]: _type = "Task" [ 1556.853191] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.861458] env[62508]: DEBUG oslo_vmware.api [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1776041, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.035534] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5efc7c09-53e3-4dc1-af6c-1cb50e29a026 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.042840] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9636104-f58b-4c30-9dee-03fa9b6fd56f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.074063] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45458be9-dbd0-4e41-b734-b2e166fe8032 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.082114] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd45f4a-90cf-49a1-a8e6-623580a70543 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.095743] env[62508]: DEBUG nova.compute.provider_tree [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1557.105683] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "refresh_cache-4d24bacc-48c4-4649-bb29-fcae2cf77782" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1557.106021] env[62508]: DEBUG nova.compute.manager [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Instance network_info: |[{"id": "c969ec53-5930-48a3-bad6-aaa89e2519c6", "address": "fa:16:3e:7a:87:4f", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc969ec53-59", "ovs_interfaceid": "c969ec53-5930-48a3-bad6-aaa89e2519c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1557.106655] env[62508]: DEBUG 
nova.virt.vmwareapi.vmops [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:87:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8140829-5eac-40d8-a10c-eb881f57affc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c969ec53-5930-48a3-bad6-aaa89e2519c6', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1557.114216] env[62508]: DEBUG oslo.service.loopingcall [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1557.114689] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1557.114924] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-900557ad-886d-44bc-9467-70137d9fa666 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.137440] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1557.137440] env[62508]: value = "task-1776042" [ 1557.137440] env[62508]: _type = "Task" [ 1557.137440] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.147439] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776042, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.258666] env[62508]: INFO nova.compute.manager [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Took 23.67 seconds to build instance. [ 1557.306561] env[62508]: DEBUG nova.compute.manager [req-39dd2815-3833-4cbe-9879-8e6913b0ea49 req-74ada8f7-d6cd-485b-a2fc-34e0d01bca3b service nova] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Received event network-changed-c969ec53-5930-48a3-bad6-aaa89e2519c6 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1557.306561] env[62508]: DEBUG nova.compute.manager [req-39dd2815-3833-4cbe-9879-8e6913b0ea49 req-74ada8f7-d6cd-485b-a2fc-34e0d01bca3b service nova] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Refreshing instance network info cache due to event network-changed-c969ec53-5930-48a3-bad6-aaa89e2519c6. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1557.306561] env[62508]: DEBUG oslo_concurrency.lockutils [req-39dd2815-3833-4cbe-9879-8e6913b0ea49 req-74ada8f7-d6cd-485b-a2fc-34e0d01bca3b service nova] Acquiring lock "refresh_cache-4d24bacc-48c4-4649-bb29-fcae2cf77782" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.306735] env[62508]: DEBUG oslo_concurrency.lockutils [req-39dd2815-3833-4cbe-9879-8e6913b0ea49 req-74ada8f7-d6cd-485b-a2fc-34e0d01bca3b service nova] Acquired lock "refresh_cache-4d24bacc-48c4-4649-bb29-fcae2cf77782" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.306909] env[62508]: DEBUG nova.network.neutron [req-39dd2815-3833-4cbe-9879-8e6913b0ea49 req-74ada8f7-d6cd-485b-a2fc-34e0d01bca3b service nova] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Refreshing network info cache for port c969ec53-5930-48a3-bad6-aaa89e2519c6 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1557.363988] env[62508]: DEBUG oslo_vmware.api [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1776041, 'name': PowerOffVM_Task, 'duration_secs': 0.221212} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.364304] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1557.364477] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1557.364723] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d947e8f-f53d-4aef-ab75-5577063cdc6f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.443899] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1557.444179] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1557.444389] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Deleting the datastore file [datastore1] 06baedda-2926-4ec8-a4f6-d62713f48a26 {{(pid=62508) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1557.444652] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-24b994b0-f380-4c98-a32f-f2473707cd7d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.451760] env[62508]: DEBUG oslo_vmware.api [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for the task: (returnval){ [ 1557.451760] env[62508]: value = "task-1776044" [ 1557.451760] env[62508]: _type = "Task" [ 1557.451760] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.460198] env[62508]: DEBUG oslo_vmware.api [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1776044, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.599039] env[62508]: DEBUG nova.scheduler.client.report [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1557.648236] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776042, 'name': CreateVM_Task, 'duration_secs': 0.372204} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.648433] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1557.649324] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.649496] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.649812] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1557.650076] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0f69cd8-70d7-4871-9074-71d92f59ea9b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.654527] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1557.654527] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528fc451-f67d-702e-c4e2-7c42f53f338d" [ 1557.654527] env[62508]: _type = "Task" [ 1557.654527] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.662182] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528fc451-f67d-702e-c4e2-7c42f53f338d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.759863] env[62508]: DEBUG oslo_concurrency.lockutils [None req-94201c41-b713-4429-9e2c-c5fb2b8b5c85 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "68d64a06-f752-459c-a152-157893e79bfd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.175s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1557.966258] env[62508]: DEBUG oslo_vmware.api [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Task: {'id': task-1776044, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162869} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.966585] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1557.966794] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1557.966977] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1557.967176] env[62508]: INFO nova.compute.manager [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1557.967423] env[62508]: DEBUG oslo.service.loopingcall [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1557.967615] env[62508]: DEBUG nova.compute.manager [-] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1557.967713] env[62508]: DEBUG nova.network.neutron [-] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1558.046023] env[62508]: DEBUG nova.network.neutron [req-39dd2815-3833-4cbe-9879-8e6913b0ea49 req-74ada8f7-d6cd-485b-a2fc-34e0d01bca3b service nova] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Updated VIF entry in instance network info cache for port c969ec53-5930-48a3-bad6-aaa89e2519c6. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1558.046500] env[62508]: DEBUG nova.network.neutron [req-39dd2815-3833-4cbe-9879-8e6913b0ea49 req-74ada8f7-d6cd-485b-a2fc-34e0d01bca3b service nova] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Updating instance_info_cache with network_info: [{"id": "c969ec53-5930-48a3-bad6-aaa89e2519c6", "address": "fa:16:3e:7a:87:4f", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc969ec53-59", "ovs_interfaceid": "c969ec53-5930-48a3-bad6-aaa89e2519c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1558.104891] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.395s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.105721] env[62508]: DEBUG nova.compute.manager [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1558.109622] env[62508]: DEBUG oslo_concurrency.lockutils [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.579s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.109950] env[62508]: DEBUG nova.objects.instance [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Lazy-loading 'resources' on Instance uuid fd658703-d477-4d21-b0ad-7ff08d4c2f97 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1558.167715] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528fc451-f67d-702e-c4e2-7c42f53f338d, 'name': SearchDatastore_Task, 'duration_secs': 0.010955} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.168057] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1558.168318] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1558.168601] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1558.169744] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1558.169978] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1558.170268] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-79376255-983a-4c03-84b4-5a3a32a9b599 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.191783] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1558.191993] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1558.192798] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36561a8f-bb63-4942-a392-44e5d64ba3b8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.198985] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1558.198985] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5278283a-5545-2817-3f36-ddf978326d36" [ 1558.198985] env[62508]: _type = "Task" [ 1558.198985] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.207788] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5278283a-5545-2817-3f36-ddf978326d36, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.297200] env[62508]: DEBUG nova.compute.manager [req-5a42b8f8-eadd-4803-ad49-d461cacbdb19 req-62e1ff12-3e15-4138-a87e-b28464323609 service nova] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Received event network-vif-deleted-e9750a97-050e-4f74-b663-2e63804efb6f {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1558.297200] env[62508]: INFO nova.compute.manager [req-5a42b8f8-eadd-4803-ad49-d461cacbdb19 req-62e1ff12-3e15-4138-a87e-b28464323609 service nova] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Neutron deleted interface e9750a97-050e-4f74-b663-2e63804efb6f; detaching it from the instance and deleting it from the info cache [ 1558.297200] env[62508]: DEBUG nova.network.neutron [req-5a42b8f8-eadd-4803-ad49-d461cacbdb19 req-62e1ff12-3e15-4138-a87e-b28464323609 service nova] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1558.506722] env[62508]: DEBUG nova.compute.manager [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1558.507661] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0c852c-51d1-431b-87f7-6c2f75e77221 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.549216] env[62508]: DEBUG oslo_concurrency.lockutils [req-39dd2815-3833-4cbe-9879-8e6913b0ea49 req-74ada8f7-d6cd-485b-a2fc-34e0d01bca3b service nova] Releasing lock "refresh_cache-4d24bacc-48c4-4649-bb29-fcae2cf77782" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1558.612882] env[62508]: DEBUG nova.compute.utils [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1558.617889] env[62508]: DEBUG nova.compute.manager [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1558.618119] env[62508]: DEBUG nova.network.neutron [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1558.662941] env[62508]: DEBUG nova.policy [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0eb49af4c067424b95247c46b69af874', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dda7c8e8d4e746219546ad1ec5c37643', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1558.710913] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5278283a-5545-2817-3f36-ddf978326d36, 'name': SearchDatastore_Task, 'duration_secs': 0.015374} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.714850] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca556578-e1c6-443a-8c19-3e872fd7bfdb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.720396] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1558.720396] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a3562a-28f3-a896-d545-a44b3f76278c" [ 1558.720396] env[62508]: _type = "Task" [ 1558.720396] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.731020] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a3562a-28f3-a896-d545-a44b3f76278c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.773166] env[62508]: DEBUG nova.network.neutron [-] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1558.805851] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7b736a1f-9686-47dd-8891-466763e494d8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.819690] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8caece12-dc71-461e-9e4a-ce01f23a1fe8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.860595] env[62508]: DEBUG nova.compute.manager [req-5a42b8f8-eadd-4803-ad49-d461cacbdb19 req-62e1ff12-3e15-4138-a87e-b28464323609 service nova] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Detach interface failed, port_id=e9750a97-050e-4f74-b663-2e63804efb6f, reason: Instance 06baedda-2926-4ec8-a4f6-d62713f48a26 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1558.993075] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242682a4-e9cd-4c5e-be86-fe248cd88c27 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.000931] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b866c1-7644-4044-a368-963b07ea9156 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.008658] env[62508]: DEBUG nova.network.neutron [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Successfully created port: 907829cf-4eb7-49fb-92b0-0135b138a80a {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1559.036332] env[62508]: INFO nova.compute.manager [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] instance snapshotting [ 1559.039330] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cfcd813-078f-4ade-baca-1c3e966846d1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.042501] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0052493-d7c7-4460-b93b-a814548b2b6c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.063526] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40be6c87-445c-433e-a573-d7f7e16486cc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.068419] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff09efa5-29f8-4662-9efd-d895b0beee4a {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.082361] env[62508]: DEBUG nova.compute.provider_tree [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1559.115728] env[62508]: DEBUG nova.compute.manager [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1559.233263] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a3562a-28f3-a896-d545-a44b3f76278c, 'name': SearchDatastore_Task, 'duration_secs': 0.010083} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.233530] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1559.233794] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 4d24bacc-48c4-4649-bb29-fcae2cf77782/4d24bacc-48c4-4649-bb29-fcae2cf77782.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1559.234071] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-01658de8-e3fb-43de-816c-97c59b4a262c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.240334] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1559.240334] env[62508]: value = "task-1776045" [ 1559.240334] env[62508]: _type = "Task" [ 1559.240334] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.248506] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776045, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.276983] env[62508]: INFO nova.compute.manager [-] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Took 1.31 seconds to deallocate network for instance. [ 1559.585536] env[62508]: DEBUG nova.scheduler.client.report [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1559.590120] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Creating Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1559.590292] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c5836e51-b952-4ee3-bb3b-871cee2dd059 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.598634] env[62508]: DEBUG oslo_vmware.api [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1559.598634] env[62508]: value = "task-1776046" [ 1559.598634] env[62508]: _type = "Task" [ 1559.598634] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.607545] env[62508]: DEBUG oslo_vmware.api [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776046, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.750691] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776045, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.783019] env[62508]: DEBUG oslo_concurrency.lockutils [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.093020] env[62508]: DEBUG oslo_concurrency.lockutils [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.982s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.094289] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.825s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.094526] env[62508]: DEBUG nova.objects.instance [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lazy-loading 'resources' on Instance uuid 38289797-ecf5-4207-a164-d70228e4411d {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1560.109828] env[62508]: DEBUG oslo_vmware.api [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776046, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.113808] env[62508]: INFO nova.scheduler.client.report [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Deleted allocations for instance fd658703-d477-4d21-b0ad-7ff08d4c2f97 [ 1560.125490] env[62508]: DEBUG nova.compute.manager [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1560.147836] env[62508]: DEBUG nova.virt.hardware [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1560.148111] env[62508]: DEBUG nova.virt.hardware [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1560.148281] env[62508]: DEBUG nova.virt.hardware [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1560.148470] env[62508]: DEBUG nova.virt.hardware [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1560.148687] env[62508]: DEBUG nova.virt.hardware [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1560.148856] env[62508]: DEBUG nova.virt.hardware [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1560.149079] env[62508]: DEBUG nova.virt.hardware [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1560.149258] env[62508]: DEBUG nova.virt.hardware [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1560.149844] env[62508]: DEBUG nova.virt.hardware [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1560.150202] env[62508]: DEBUG nova.virt.hardware [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1560.150410] env[62508]: DEBUG nova.virt.hardware [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1560.151414] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c689b0-7112-4fda-9ddf-5281f902f0fd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.160318] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4de4ff6-3eb8-43ae-8167-6b4033d5ea3d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.251947] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776045, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.601289} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.252112] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 4d24bacc-48c4-4649-bb29-fcae2cf77782/4d24bacc-48c4-4649-bb29-fcae2cf77782.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1560.252360] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1560.252622] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-37983510-c2b1-4ed9-a9f3-5cd6ef250d2f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.259454] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1560.259454] env[62508]: value = "task-1776047" [ 1560.259454] env[62508]: _type = "Task" [ 1560.259454] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.267528] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776047, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.583727] env[62508]: DEBUG nova.compute.manager [req-925b492f-a98f-43f1-b5e9-6bdcbf0bff3b req-747b969b-8dea-466a-862e-62e2285379da service nova] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Received event network-vif-plugged-907829cf-4eb7-49fb-92b0-0135b138a80a {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1560.583861] env[62508]: DEBUG oslo_concurrency.lockutils [req-925b492f-a98f-43f1-b5e9-6bdcbf0bff3b req-747b969b-8dea-466a-862e-62e2285379da service nova] Acquiring lock "4bf92157-1d8c-4c3c-bc61-adb6d26bff54-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.584083] env[62508]: DEBUG oslo_concurrency.lockutils [req-925b492f-a98f-43f1-b5e9-6bdcbf0bff3b req-747b969b-8dea-466a-862e-62e2285379da service nova] Lock "4bf92157-1d8c-4c3c-bc61-adb6d26bff54-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.584382] env[62508]: DEBUG oslo_concurrency.lockutils [req-925b492f-a98f-43f1-b5e9-6bdcbf0bff3b req-747b969b-8dea-466a-862e-62e2285379da service nova] Lock "4bf92157-1d8c-4c3c-bc61-adb6d26bff54-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.584567] env[62508]: DEBUG nova.compute.manager [req-925b492f-a98f-43f1-b5e9-6bdcbf0bff3b req-747b969b-8dea-466a-862e-62e2285379da service nova] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] No waiting events found dispatching network-vif-plugged-907829cf-4eb7-49fb-92b0-0135b138a80a {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1560.584761] env[62508]: WARNING nova.compute.manager [req-925b492f-a98f-43f1-b5e9-6bdcbf0bff3b req-747b969b-8dea-466a-862e-62e2285379da service nova] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Received unexpected event network-vif-plugged-907829cf-4eb7-49fb-92b0-0135b138a80a for instance with vm_state building and task_state spawning. [ 1560.612714] env[62508]: DEBUG oslo_vmware.api [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776046, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.624066] env[62508]: DEBUG oslo_concurrency.lockutils [None req-864ceccb-8c09-40e2-9d21-488cde427ab1 tempest-ServerAddressesNegativeTestJSON-2011882184 tempest-ServerAddressesNegativeTestJSON-2011882184-project-member] Lock "fd658703-d477-4d21-b0ad-7ff08d4c2f97" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.335s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.773022] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776047, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082136} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.775536] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1560.777124] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf1cf5c-fed8-48b9-8bba-326d89c97b6f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.800657] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 4d24bacc-48c4-4649-bb29-fcae2cf77782/4d24bacc-48c4-4649-bb29-fcae2cf77782.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1560.803641] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6e37ac7-cbee-46da-be01-0c4907963b72 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.829024] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1560.829024] env[62508]: value = "task-1776048" [ 1560.829024] env[62508]: _type = "Task" [ 1560.829024] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.837722] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776048, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.854651] env[62508]: DEBUG nova.network.neutron [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Successfully updated port: 907829cf-4eb7-49fb-92b0-0135b138a80a {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1560.992020] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67892c1b-8638-4b76-93a2-53a80017b376 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.998682] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e993aa8e-bbde-4cd9-ac27-2cf58d6a7b88 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.029732] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f9b6e3-72df-45ea-9bc8-7b4692283d7e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.037700] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91eca1fa-d32c-4fbc-9c8f-cb56a1c5215e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.053678] env[62508]: DEBUG nova.compute.provider_tree [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1561.113902] env[62508]: DEBUG oslo_vmware.api [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776046, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.336864] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776048, 'name': ReconfigVM_Task, 'duration_secs': 0.280586} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.337199] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 4d24bacc-48c4-4649-bb29-fcae2cf77782/4d24bacc-48c4-4649-bb29-fcae2cf77782.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1561.338198] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-470cede5-b1a8-44be-8898-e8dbb19f1328 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.345392] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1561.345392] env[62508]: value = "task-1776049" [ 1561.345392] env[62508]: _type = "Task" [ 1561.345392] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.352990] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776049, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.355678] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Acquiring lock "refresh_cache-4bf92157-1d8c-4c3c-bc61-adb6d26bff54" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.355812] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Acquired lock "refresh_cache-4bf92157-1d8c-4c3c-bc61-adb6d26bff54" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.355962] env[62508]: DEBUG nova.network.neutron [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1561.557600] env[62508]: DEBUG nova.scheduler.client.report [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1561.567042] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1561.567042] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1561.567042] env[62508]: INFO nova.compute.manager [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Shelving [ 1561.613335] env[62508]: DEBUG oslo_vmware.api [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776046, 'name': CreateSnapshot_Task, 'duration_secs': 1.521198} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.613739] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Created Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1561.614565] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f27d9c-7bcf-4df0-982a-cf72fedf9e1a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.855973] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776049, 'name': Rename_Task, 'duration_secs': 0.138437} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.856358] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1561.856682] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b9ae66c-e0a7-4918-ac94-4c65523f3c4e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.865037] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1561.865037] env[62508]: value = "task-1776050" [ 1561.865037] env[62508]: _type = "Task" [ 1561.865037] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.873875] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776050, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.898977] env[62508]: DEBUG nova.network.neutron [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1562.066931] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.973s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.069242] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.657s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.070738] env[62508]: INFO nova.compute.claims [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1562.080414] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1562.080948] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-91c238f6-3a0a-4d11-aae7-7f6a3e56ec4e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.083739] env[62508]: DEBUG nova.network.neutron [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Updating instance_info_cache with network_info: [{"id": "907829cf-4eb7-49fb-92b0-0135b138a80a", "address": "fa:16:3e:80:8c:55", "network": {"id": "07b525d6-e763-4d5e-a0f3-8f1c480a5f4d", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-866085524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dda7c8e8d4e746219546ad1ec5c37643", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap907829cf-4e", "ovs_interfaceid": "907829cf-4eb7-49fb-92b0-0135b138a80a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.090554] env[62508]: DEBUG oslo_vmware.api [None 
req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1562.090554] env[62508]: value = "task-1776051" [ 1562.090554] env[62508]: _type = "Task" [ 1562.090554] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.101050] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776051, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.102714] env[62508]: INFO nova.scheduler.client.report [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Deleted allocations for instance 38289797-ecf5-4207-a164-d70228e4411d [ 1562.136063] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Creating linked-clone VM from snapshot {{(pid=62508) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1562.137285] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-842c34dd-f140-4080-9fe2-f27178eb7f1e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.152498] env[62508]: DEBUG oslo_vmware.api [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1562.152498] env[62508]: value = "task-1776052" [ 1562.152498] env[62508]: _type = "Task" [ 1562.152498] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.162860] env[62508]: DEBUG oslo_vmware.api [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776052, 'name': CloneVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.377846] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776050, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.587950] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Releasing lock "refresh_cache-4bf92157-1d8c-4c3c-bc61-adb6d26bff54" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1562.588374] env[62508]: DEBUG nova.compute.manager [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Instance network_info: |[{"id": "907829cf-4eb7-49fb-92b0-0135b138a80a", "address": "fa:16:3e:80:8c:55", "network": {"id": "07b525d6-e763-4d5e-a0f3-8f1c480a5f4d", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-866085524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dda7c8e8d4e746219546ad1ec5c37643", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap907829cf-4e", "ovs_interfaceid": "907829cf-4eb7-49fb-92b0-0135b138a80a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1562.589418] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:8c:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a555680e-4721-4509-97e4-ced9dc17c13e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '907829cf-4eb7-49fb-92b0-0135b138a80a', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1562.598222] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Creating folder: Project (dda7c8e8d4e746219546ad1ec5c37643). Parent ref: group-v368536. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1562.598923] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c3606d2-b4a1-4dff-b335-0639b568251e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.613562] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776051, 'name': PowerOffVM_Task, 'duration_secs': 0.184947} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.615207] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ee6703ac-0e39-4691-982a-b9d95e9e8ba5 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "38289797-ecf5-4207-a164-d70228e4411d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.950s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.620278] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1562.620278] env[62508]: DEBUG nova.compute.manager [req-efe9a9f3-8f49-458b-8756-7b2d37100722 req-534b3457-cc40-4bc3-8154-e1dfe4259cad service nova] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Received event network-changed-907829cf-4eb7-49fb-92b0-0135b138a80a {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1562.620278] env[62508]: DEBUG nova.compute.manager [req-efe9a9f3-8f49-458b-8756-7b2d37100722 req-534b3457-cc40-4bc3-8154-e1dfe4259cad service nova] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Refreshing instance network info cache due to event network-changed-907829cf-4eb7-49fb-92b0-0135b138a80a. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1562.620278] env[62508]: DEBUG oslo_concurrency.lockutils [req-efe9a9f3-8f49-458b-8756-7b2d37100722 req-534b3457-cc40-4bc3-8154-e1dfe4259cad service nova] Acquiring lock "refresh_cache-4bf92157-1d8c-4c3c-bc61-adb6d26bff54" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1562.620426] env[62508]: DEBUG oslo_concurrency.lockutils [req-efe9a9f3-8f49-458b-8756-7b2d37100722 req-534b3457-cc40-4bc3-8154-e1dfe4259cad service nova] Acquired lock "refresh_cache-4bf92157-1d8c-4c3c-bc61-adb6d26bff54" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1562.620668] env[62508]: DEBUG nova.network.neutron [req-efe9a9f3-8f49-458b-8756-7b2d37100722 req-534b3457-cc40-4bc3-8154-e1dfe4259cad service nova] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Refreshing network info cache for port 907829cf-4eb7-49fb-92b0-0135b138a80a {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1562.621984] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Created folder: Project (dda7c8e8d4e746219546ad1ec5c37643) in parent group-v368536. [ 1562.622220] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Creating folder: Instances. Parent ref: group-v368721. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1562.623368] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73bfa2dd-7499-4da7-bfbb-6c3ff32228dd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.626717] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a8b5d4a2-b54d-4e8f-b9eb-843429a05100 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.647667] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f7ad922-a88d-4218-b731-4ddafb59e28e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.650835] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Created folder: Instances in parent group-v368721. [ 1562.651132] env[62508]: DEBUG oslo.service.loopingcall [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1562.651334] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1562.651939] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2eaf29f7-f3af-47ae-902d-976982cbee38 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.679150] env[62508]: DEBUG oslo_vmware.api [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776052, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.680314] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1562.680314] env[62508]: value = "task-1776055" [ 1562.680314] env[62508]: _type = "Task" [ 1562.680314] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.688110] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776055, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.875583] env[62508]: DEBUG oslo_vmware.api [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776050, 'name': PowerOnVM_Task, 'duration_secs': 0.597605} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.875889] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1562.876091] env[62508]: INFO nova.compute.manager [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Took 8.07 seconds to spawn the instance on the hypervisor. 
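The PowerOffVM_Task, CloneVM_Task, CreateVM_Task and PowerOnVM_Task entries above all follow the same oslo.vmware pattern: invoke the vSphere API method that returns a Task object, then block in wait_for_task(), which produces the recurring "progress is N%" _poll_task lines until the task completes. A minimal sketch of that pattern follows; the function name, hostname and credentials are illustrative placeholders, not taken from this log.

    from oslo_vmware import api

    def power_off(session, vm_ref):
        """Power off a VM and block until the vSphere task finishes."""
        # invoke_api returns a Task managed-object reference (e.g. "task-1776051")
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task polls the task periodically -- the "progress is N%"
        # lines above -- and returns on success or raises on task failure
        session.wait_for_task(task)

    # The session would be constructed roughly like this (placeholder values):
    # session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
    #                                api_retry_count=10, task_poll_interval=0.5)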
[ 1562.876289] env[62508]: DEBUG nova.compute.manager [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1562.877135] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df3f1e25-e3c3-4af7-bf97-2187132a489b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.179786] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Creating Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1563.181180] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-09629842-068d-4374-b8c0-194890cf9ff9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.188947] env[62508]: DEBUG oslo_vmware.api [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776052, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.202630] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1563.202630] env[62508]: value = "task-1776056" [ 1563.202630] env[62508]: _type = "Task" [ 1563.202630] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.216428] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776055, 'name': CreateVM_Task, 'duration_secs': 0.482243} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.217715] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1563.218108] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1563.218346] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1563.218775] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1563.223309] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b0a0532-fa8a-4e75-a823-9b60c51e8ad4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.225805] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776056, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.229098] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Waiting for the task: (returnval){ [ 1563.229098] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c8e5e8-848c-ff52-9447-02391fccdc1d" [ 1563.229098] env[62508]: _type = "Task" [ 1563.229098] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.243149] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c8e5e8-848c-ff52-9447-02391fccdc1d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.399121] env[62508]: INFO nova.compute.manager [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Took 25.76 seconds to build instance. 
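The repeated 'Lock "compute_resources" acquired by ... waited Ns' and '"released" ... held Ns' entries come from oslo_concurrency.lockutils wrapping the resource-tracker methods. A rough sketch of the decorator pattern that emits those lines is shown below; the function body and lock name here are illustrative only.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid):
        # Only one claim or usage update runs at a time in this worker; the
        # lockutils wrapper logs how long the caller waited for and held the lock.
        return {'instance': instance_uuid}

    instance_claim('29223197-9a79-45cc-baa6-3deb731ec08e')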
[ 1563.451476] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53983bae-b0c2-4d5c-9d16-b3786f2a3121 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.460873] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9caa5bb-c944-4ba1-ad13-4d091945264d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.499628] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc82331-2f14-43fb-95bc-1aea34698d1a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.513803] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae7211b-2c2f-45af-9782-9371dbaac5c8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.532251] env[62508]: DEBUG nova.compute.provider_tree [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1563.569829] env[62508]: DEBUG nova.network.neutron [req-efe9a9f3-8f49-458b-8756-7b2d37100722 req-534b3457-cc40-4bc3-8154-e1dfe4259cad service nova] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Updated VIF entry in instance network info cache for port 907829cf-4eb7-49fb-92b0-0135b138a80a. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1563.570305] env[62508]: DEBUG nova.network.neutron [req-efe9a9f3-8f49-458b-8756-7b2d37100722 req-534b3457-cc40-4bc3-8154-e1dfe4259cad service nova] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Updating instance_info_cache with network_info: [{"id": "907829cf-4eb7-49fb-92b0-0135b138a80a", "address": "fa:16:3e:80:8c:55", "network": {"id": "07b525d6-e763-4d5e-a0f3-8f1c480a5f4d", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-866085524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dda7c8e8d4e746219546ad1ec5c37643", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap907829cf-4e", "ovs_interfaceid": "907829cf-4eb7-49fb-92b0-0135b138a80a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1563.681636] env[62508]: DEBUG oslo_vmware.api [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 
tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776052, 'name': CloneVM_Task, 'duration_secs': 1.524169} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.681974] env[62508]: INFO nova.virt.vmwareapi.vmops [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Created linked-clone VM from snapshot [ 1563.682838] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed65a766-01d3-4d8f-8fbb-e61f32a8d307 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.691114] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Uploading image cce07762-d034-41a0-9778-c0b0dab30ae0 {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1563.712149] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776056, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.719739] env[62508]: DEBUG oslo_vmware.rw_handles [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1563.719739] env[62508]: value = "vm-368720" [ 1563.719739] env[62508]: _type = "VirtualMachine" [ 1563.719739] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1563.720008] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-029571fa-a05d-45d5-8370-36d6ba6ba7f8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.728151] env[62508]: DEBUG oslo_vmware.rw_handles [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lease: (returnval){ [ 1563.728151] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5231b272-babf-fd3b-db5e-b83818948a15" [ 1563.728151] env[62508]: _type = "HttpNfcLease" [ 1563.728151] env[62508]: } obtained for exporting VM: (result){ [ 1563.728151] env[62508]: value = "vm-368720" [ 1563.728151] env[62508]: _type = "VirtualMachine" [ 1563.728151] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1563.728424] env[62508]: DEBUG oslo_vmware.api [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the lease: (returnval){ [ 1563.728424] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5231b272-babf-fd3b-db5e-b83818948a15" [ 1563.728424] env[62508]: _type = "HttpNfcLease" [ 1563.728424] env[62508]: } to be ready. 
{{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1563.737942] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1563.737942] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5231b272-babf-fd3b-db5e-b83818948a15" [ 1563.737942] env[62508]: _type = "HttpNfcLease" [ 1563.737942] env[62508]: } is initializing. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1563.741588] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c8e5e8-848c-ff52-9447-02391fccdc1d, 'name': SearchDatastore_Task, 'duration_secs': 0.033767} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.741823] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1563.742069] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1563.742307] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1563.742461] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1563.742697] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1563.742903] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d553952-335f-493b-bf2e-f5fade7a355b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.752840] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Created 
directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1563.752950] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1563.753660] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6983668a-87c8-4708-a993-b49491d43d0a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.759117] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Waiting for the task: (returnval){ [ 1563.759117] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ac1fc7-ac8d-3ece-dde8-4f4517381786" [ 1563.759117] env[62508]: _type = "Task" [ 1563.759117] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.767116] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ac1fc7-ac8d-3ece-dde8-4f4517381786, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.901486] env[62508]: DEBUG oslo_concurrency.lockutils [None req-63afc814-8c85-4313-a6f9-fea0a293e04d tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "4d24bacc-48c4-4649-bb29-fcae2cf77782" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.283s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1564.036862] env[62508]: DEBUG nova.scheduler.client.report [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1564.073248] env[62508]: DEBUG oslo_concurrency.lockutils [req-efe9a9f3-8f49-458b-8756-7b2d37100722 req-534b3457-cc40-4bc3-8154-e1dfe4259cad service nova] Releasing lock "refresh_cache-4bf92157-1d8c-4c3c-bc61-adb6d26bff54" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1564.217787] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776056, 
'name': CreateSnapshot_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.237723] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1564.237723] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5231b272-babf-fd3b-db5e-b83818948a15" [ 1564.237723] env[62508]: _type = "HttpNfcLease" [ 1564.237723] env[62508]: } is ready. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1564.238116] env[62508]: DEBUG oslo_vmware.rw_handles [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1564.238116] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5231b272-babf-fd3b-db5e-b83818948a15" [ 1564.238116] env[62508]: _type = "HttpNfcLease" [ 1564.238116] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1564.239324] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50d92e66-887b-40d4-9dd5-0fa87c2c13dc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.250307] env[62508]: DEBUG oslo_vmware.rw_handles [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc05f1-b19a-5f58-53de-8e1e1e162544/disk-0.vmdk from lease info. {{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1564.250531] env[62508]: DEBUG oslo_vmware.rw_handles [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc05f1-b19a-5f58-53de-8e1e1e162544/disk-0.vmdk for reading. {{(pid=62508) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1564.332377] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ac1fc7-ac8d-3ece-dde8-4f4517381786, 'name': SearchDatastore_Task, 'duration_secs': 0.032345} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.333225] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4e66c3e-c6c1-454c-b1d6-4f2811006e91 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.338876] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Waiting for the task: (returnval){ [ 1564.338876] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52cff433-4956-84d8-cdd4-c531c35abd5a" [ 1564.338876] env[62508]: _type = "Task" [ 1564.338876] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.347300] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52cff433-4956-84d8-cdd4-c531c35abd5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.381692] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d4e298d9-f697-4a3f-9e4d-4b1e5330cac6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.453455] env[62508]: DEBUG nova.compute.manager [req-32cb85fb-80dc-417a-a72e-b425bed07c7a req-e59e5f60-4d5c-409e-b97f-ad2503f3cb81 service nova] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Received event network-changed-c969ec53-5930-48a3-bad6-aaa89e2519c6 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1564.453455] env[62508]: DEBUG nova.compute.manager [req-32cb85fb-80dc-417a-a72e-b425bed07c7a req-e59e5f60-4d5c-409e-b97f-ad2503f3cb81 service nova] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Refreshing instance network info cache due to event network-changed-c969ec53-5930-48a3-bad6-aaa89e2519c6. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1564.453455] env[62508]: DEBUG oslo_concurrency.lockutils [req-32cb85fb-80dc-417a-a72e-b425bed07c7a req-e59e5f60-4d5c-409e-b97f-ad2503f3cb81 service nova] Acquiring lock "refresh_cache-4d24bacc-48c4-4649-bb29-fcae2cf77782" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1564.453455] env[62508]: DEBUG oslo_concurrency.lockutils [req-32cb85fb-80dc-417a-a72e-b425bed07c7a req-e59e5f60-4d5c-409e-b97f-ad2503f3cb81 service nova] Acquired lock "refresh_cache-4d24bacc-48c4-4649-bb29-fcae2cf77782" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1564.453609] env[62508]: DEBUG nova.network.neutron [req-32cb85fb-80dc-417a-a72e-b425bed07c7a req-e59e5f60-4d5c-409e-b97f-ad2503f3cb81 service nova] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Refreshing network info cache for port c969ec53-5930-48a3-bad6-aaa89e2519c6 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1564.542760] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.473s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1564.545467] env[62508]: DEBUG nova.compute.manager [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1564.547127] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 17.118s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1564.547732] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1564.547732] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1564.548097] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.163s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1564.548287] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1564.550832] env[62508]: DEBUG oslo_concurrency.lockutils [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.041s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1564.551165] env[62508]: DEBUG nova.objects.instance [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lazy-loading 'resources' on Instance uuid cdb1ccaf-83b3-48f8-92da-aca2310863ac {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1564.553542] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d0b3a5-d632-419d-b3dc-eb21022fb0cf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.567168] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa24730-bd34-4f30-b79a-edc5079a9863 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.584804] env[62508]: INFO nova.scheduler.client.report [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Deleted allocations for instance 
e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6 [ 1564.586331] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a43f80-6e72-4c07-8bd7-18e8fd936b95 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.599755] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f57316c4-1327-4c0b-a717-cb204e298a58 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.630629] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178709MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1564.630629] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1564.714198] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776056, 'name': CreateSnapshot_Task, 'duration_secs': 1.101814} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.714569] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Created Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1564.715597] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0461ce6c-bbc6-4145-a5be-6c545ba5511f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.849782] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52cff433-4956-84d8-cdd4-c531c35abd5a, 'name': SearchDatastore_Task, 'duration_secs': 0.028934} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.850469] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1564.850812] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 4bf92157-1d8c-4c3c-bc61-adb6d26bff54/4bf92157-1d8c-4c3c-bc61-adb6d26bff54.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1564.850975] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a507430b-4179-4b85-844c-e90ae3f52ec2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.858349] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Waiting for the task: (returnval){ [ 1564.858349] env[62508]: value = "task-1776058" [ 1564.858349] env[62508]: _type = "Task" [ 1564.858349] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.866196] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.062637] env[62508]: DEBUG nova.compute.utils [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1565.065585] env[62508]: DEBUG nova.compute.manager [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1565.065872] env[62508]: DEBUG nova.network.neutron [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1565.097443] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d3949383-fb65-4006-8bbe-655156a833e0 tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.644s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.162600] env[62508]: DEBUG nova.policy [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e5a99cd463e41a29fdcca4e47509534', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87d56d1936b84c7080824ab21b1c05e9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1565.238636] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Creating linked-clone VM from snapshot {{(pid=62508) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1565.239291] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3e6c1d2e-3f19-45a8-bf18-1139d1b257ae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.248652] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1565.248652] env[62508]: value = "task-1776059" [ 1565.248652] env[62508]: _type = "Task" [ 1565.248652] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.253582] env[62508]: DEBUG nova.network.neutron [req-32cb85fb-80dc-417a-a72e-b425bed07c7a req-e59e5f60-4d5c-409e-b97f-ad2503f3cb81 service nova] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Updated VIF entry in instance network info cache for port c969ec53-5930-48a3-bad6-aaa89e2519c6. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1565.254050] env[62508]: DEBUG nova.network.neutron [req-32cb85fb-80dc-417a-a72e-b425bed07c7a req-e59e5f60-4d5c-409e-b97f-ad2503f3cb81 service nova] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Updating instance_info_cache with network_info: [{"id": "c969ec53-5930-48a3-bad6-aaa89e2519c6", "address": "fa:16:3e:7a:87:4f", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.242", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc969ec53-59", "ovs_interfaceid": "c969ec53-5930-48a3-bad6-aaa89e2519c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1565.263336] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776059, 'name': CloneVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.371797] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776058, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.443957] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "145306d7-f0e8-46c0-b2ab-1c41c208f976" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.444342] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "145306d7-f0e8-46c0-b2ab-1c41c208f976" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.510215] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c347ea9d-2451-47b1-a84f-07258ccfacb7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.519794] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d13e2ce-0982-41cb-9173-4d235c62956f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.556950] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b7f125-d0d6-4ade-a28b-08bc85009bf5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.565320] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1a7b72-058b-44d2-a4e9-ac0814e82c5b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.571358] env[62508]: DEBUG nova.compute.manager [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1565.582680] env[62508]: DEBUG nova.compute.provider_tree [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1565.627606] env[62508]: DEBUG nova.network.neutron [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Successfully created port: 46b30a18-08fa-41cb-93bd-8be3e3a0fa1e {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1565.760345] env[62508]: DEBUG oslo_concurrency.lockutils [req-32cb85fb-80dc-417a-a72e-b425bed07c7a req-e59e5f60-4d5c-409e-b97f-ad2503f3cb81 service nova] Releasing lock "refresh_cache-4d24bacc-48c4-4649-bb29-fcae2cf77782" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1565.761201] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776059, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.870961] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776058, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.941797] env[62508]: DEBUG oslo_concurrency.lockutils [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "868cf942-f348-488d-b00a-af4c8b5efda5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.941968] env[62508]: DEBUG oslo_concurrency.lockutils [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "868cf942-f348-488d-b00a-af4c8b5efda5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.942243] env[62508]: DEBUG oslo_concurrency.lockutils [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "868cf942-f348-488d-b00a-af4c8b5efda5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.943411] env[62508]: DEBUG oslo_concurrency.lockutils [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "868cf942-f348-488d-b00a-af4c8b5efda5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.943672] env[62508]: DEBUG oslo_concurrency.lockutils [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "868cf942-f348-488d-b00a-af4c8b5efda5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.947353] env[62508]: DEBUG nova.compute.manager [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1565.950955] env[62508]: INFO nova.compute.manager [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Terminating instance [ 1565.955707] env[62508]: DEBUG nova.compute.manager [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1565.955707] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1565.955707] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf0aba5-646c-44a8-8830-50088ce88c4a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.964953] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1565.965302] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7fa2622-5fed-4788-97bd-33e76c822cb4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.978694] env[62508]: DEBUG oslo_vmware.api [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1565.978694] env[62508]: value = "task-1776060" [ 1565.978694] env[62508]: _type = "Task" [ 1565.978694] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.989730] env[62508]: DEBUG oslo_vmware.api [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776060, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.089814] env[62508]: DEBUG nova.scheduler.client.report [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1566.261468] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776059, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.370906] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776058, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.478498] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1566.489147] env[62508]: DEBUG oslo_vmware.api [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776060, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.594859] env[62508]: DEBUG nova.compute.manager [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1566.597360] env[62508]: DEBUG oslo_concurrency.lockutils [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.047s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1566.599718] env[62508]: DEBUG oslo_concurrency.lockutils [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 11.861s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1566.619361] env[62508]: INFO nova.scheduler.client.report [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Deleted allocations for instance cdb1ccaf-83b3-48f8-92da-aca2310863ac [ 1566.627197] env[62508]: DEBUG nova.virt.hardware [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1566.627446] env[62508]: DEBUG 
nova.virt.hardware [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1566.627626] env[62508]: DEBUG nova.virt.hardware [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1566.627827] env[62508]: DEBUG nova.virt.hardware [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1566.628046] env[62508]: DEBUG nova.virt.hardware [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1566.628163] env[62508]: DEBUG nova.virt.hardware [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1566.628606] env[62508]: DEBUG nova.virt.hardware [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1566.629149] env[62508]: DEBUG nova.virt.hardware [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1566.629234] env[62508]: DEBUG nova.virt.hardware [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1566.629551] env[62508]: DEBUG nova.virt.hardware [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1566.629792] env[62508]: DEBUG nova.virt.hardware [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1566.631063] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fffbce0-49c9-45cd-af6d-ab303bbf4431 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.640019] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ae78c4-63e9-4b46-9d76-8f94b8d96ad7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.763037] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776059, 'name': CloneVM_Task} progress is 95%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.871124] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.989291] env[62508]: DEBUG oslo_vmware.api [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776060, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.104485] env[62508]: INFO nova.compute.claims [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1567.129723] env[62508]: DEBUG oslo_concurrency.lockutils [None req-757116f6-d220-4ccf-9c3d-863f416da969 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "cdb1ccaf-83b3-48f8-92da-aca2310863ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.605s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.262873] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776059, 'name': CloneVM_Task} progress is 95%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.372253] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.491401] env[62508]: DEBUG oslo_vmware.api [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776060, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.611430] env[62508]: INFO nova.compute.resource_tracker [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Updating resource usage from migration b20f235e-fe4d-4b55-9bdd-2acccc8f2871 [ 1567.668232] env[62508]: DEBUG nova.compute.manager [req-7c937055-51f6-422d-a662-3a3a7d2bbe16 req-4dde397c-fc72-4524-be33-3f664ed270c4 service nova] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Received event network-vif-plugged-46b30a18-08fa-41cb-93bd-8be3e3a0fa1e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1567.668505] env[62508]: DEBUG oslo_concurrency.lockutils [req-7c937055-51f6-422d-a662-3a3a7d2bbe16 req-4dde397c-fc72-4524-be33-3f664ed270c4 service nova] Acquiring lock "29223197-9a79-45cc-baa6-3deb731ec08e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.670626] env[62508]: DEBUG oslo_concurrency.lockutils [req-7c937055-51f6-422d-a662-3a3a7d2bbe16 req-4dde397c-fc72-4524-be33-3f664ed270c4 service nova] Lock "29223197-9a79-45cc-baa6-3deb731ec08e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.670626] env[62508]: DEBUG oslo_concurrency.lockutils [req-7c937055-51f6-422d-a662-3a3a7d2bbe16 req-4dde397c-fc72-4524-be33-3f664ed270c4 service nova] Lock "29223197-9a79-45cc-baa6-3deb731ec08e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.670626] env[62508]: DEBUG nova.compute.manager [req-7c937055-51f6-422d-a662-3a3a7d2bbe16 req-4dde397c-fc72-4524-be33-3f664ed270c4 service nova] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] No waiting events found dispatching network-vif-plugged-46b30a18-08fa-41cb-93bd-8be3e3a0fa1e {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1567.670626] env[62508]: WARNING nova.compute.manager [req-7c937055-51f6-422d-a662-3a3a7d2bbe16 req-4dde397c-fc72-4524-be33-3f664ed270c4 service nova] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Received unexpected event network-vif-plugged-46b30a18-08fa-41cb-93bd-8be3e3a0fa1e for instance with vm_state building and task_state spawning. [ 1567.764633] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776059, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.768587] env[62508]: DEBUG nova.network.neutron [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Successfully updated port: 46b30a18-08fa-41cb-93bd-8be3e3a0fa1e {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1567.877996] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.936309] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "a9b92a6d-154c-42bb-842c-bc42a07299a0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.936549] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "a9b92a6d-154c-42bb-842c-bc42a07299a0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.044606] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d12540-6246-4383-8627-e6746fb5201e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.044606] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86da603d-f03c-4f11-87a2-7fcd0a7b4e88 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.044606] env[62508]: DEBUG oslo_vmware.api [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776060, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.059856] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62003470-ee8e-4848-88f0-85efe5d76f44 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.067905] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f795d941-0b7f-4c7a-9a05-76c183ee73aa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.081246] env[62508]: DEBUG nova.compute.provider_tree [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1568.263324] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776059, 'name': CloneVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.274166] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Acquiring lock "refresh_cache-29223197-9a79-45cc-baa6-3deb731ec08e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1568.274551] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Acquired lock "refresh_cache-29223197-9a79-45cc-baa6-3deb731ec08e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1568.274551] env[62508]: DEBUG nova.network.neutron [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1568.372933] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.439379] env[62508]: DEBUG nova.compute.manager [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1568.493777] env[62508]: DEBUG oslo_vmware.api [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776060, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.584630] env[62508]: DEBUG nova.scheduler.client.report [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1568.765734] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776059, 'name': CloneVM_Task, 'duration_secs': 3.047295} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.765999] env[62508]: INFO nova.virt.vmwareapi.vmops [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Created linked-clone VM from snapshot [ 1568.766821] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf6ce8e-40d0-4cc3-8c29-56fc81b3b276 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.774051] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Uploading image d17b0134-9e41-4671-a49a-a34df9610594 {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1568.797913] env[62508]: DEBUG oslo_vmware.rw_handles [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1568.797913] env[62508]: value = "vm-368725" [ 1568.797913] env[62508]: _type = "VirtualMachine" [ 1568.797913] env[62508]: }. 
{{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1568.798336] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-8d509527-55d4-407a-aab7-39123d694645 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.806368] env[62508]: DEBUG oslo_vmware.rw_handles [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lease: (returnval){ [ 1568.806368] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528ae4f9-9f5f-79d3-a684-78f19d22b0c1" [ 1568.806368] env[62508]: _type = "HttpNfcLease" [ 1568.806368] env[62508]: } obtained for exporting VM: (result){ [ 1568.806368] env[62508]: value = "vm-368725" [ 1568.806368] env[62508]: _type = "VirtualMachine" [ 1568.806368] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1568.806747] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the lease: (returnval){ [ 1568.806747] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528ae4f9-9f5f-79d3-a684-78f19d22b0c1" [ 1568.806747] env[62508]: _type = "HttpNfcLease" [ 1568.806747] env[62508]: } to be ready. {{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1568.807676] env[62508]: DEBUG nova.network.neutron [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1568.815780] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1568.815780] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528ae4f9-9f5f-79d3-a684-78f19d22b0c1" [ 1568.815780] env[62508]: _type = "HttpNfcLease" [ 1568.815780] env[62508]: } is initializing. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1568.874305] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776058, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.963501] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.995465] env[62508]: DEBUG oslo_vmware.api [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776060, 'name': PowerOffVM_Task, 'duration_secs': 2.591206} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.995791] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1568.995967] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1568.996937] env[62508]: DEBUG nova.network.neutron [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Updating instance_info_cache with network_info: [{"id": "46b30a18-08fa-41cb-93bd-8be3e3a0fa1e", "address": "fa:16:3e:07:48:7b", "network": {"id": "28335ce1-23c8-4968-891b-cae3b9487fae", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-399922061-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87d56d1936b84c7080824ab21b1c05e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46b30a18-08", "ovs_interfaceid": "46b30a18-08fa-41cb-93bd-8be3e3a0fa1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.998098] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f4001fc7-8b59-4719-8342-743fa2f948ce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.092494] env[62508]: DEBUG oslo_concurrency.lockutils [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.493s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.092759] env[62508]: INFO nova.compute.manager [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Migrating [ 1569.101291] env[62508]: DEBUG oslo_concurrency.lockutils [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.318s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.101662] env[62508]: DEBUG nova.objects.instance [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lazy-loading 'resources' on Instance uuid 06baedda-2926-4ec8-a4f6-d62713f48a26 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1569.246129] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1569.246376] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1569.246582] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Deleting the datastore file [datastore1] 868cf942-f348-488d-b00a-af4c8b5efda5 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1569.246941] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-989d59ff-d5e2-4580-81a5-4f3b8d514a58 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.256552] env[62508]: DEBUG oslo_vmware.api [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for the task: (returnval){ [ 1569.256552] env[62508]: value = "task-1776063" [ 1569.256552] env[62508]: _type = "Task" [ 1569.256552] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.265726] env[62508]: DEBUG oslo_vmware.api [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776063, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.315254] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1569.315254] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528ae4f9-9f5f-79d3-a684-78f19d22b0c1" [ 1569.315254] env[62508]: _type = "HttpNfcLease" [ 1569.315254] env[62508]: } is ready. 
{{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1569.315552] env[62508]: DEBUG oslo_vmware.rw_handles [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1569.315552] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528ae4f9-9f5f-79d3-a684-78f19d22b0c1" [ 1569.315552] env[62508]: _type = "HttpNfcLease" [ 1569.315552] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1569.316333] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af2c502-37a4-4cbd-9e30-5562a96ca460 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.324214] env[62508]: DEBUG oslo_vmware.rw_handles [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524dd2f3-2f1a-6486-f546-5805c0abfde3/disk-0.vmdk from lease info. {{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1569.324420] env[62508]: DEBUG oslo_vmware.rw_handles [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524dd2f3-2f1a-6486-f546-5805c0abfde3/disk-0.vmdk for reading. {{(pid=62508) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1569.395505] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776058, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.450117] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-98c7373a-9448-420a-9f63-de8d6cbc5a5e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.501370] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Releasing lock "refresh_cache-29223197-9a79-45cc-baa6-3deb731ec08e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1569.501765] env[62508]: DEBUG nova.compute.manager [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Instance network_info: |[{"id": "46b30a18-08fa-41cb-93bd-8be3e3a0fa1e", "address": "fa:16:3e:07:48:7b", "network": {"id": "28335ce1-23c8-4968-891b-cae3b9487fae", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-399922061-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87d56d1936b84c7080824ab21b1c05e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46b30a18-08", "ovs_interfaceid": "46b30a18-08fa-41cb-93bd-8be3e3a0fa1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1569.502243] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:48:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e6db039c-542c-4544-a57d-ddcc6c1e8e45', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '46b30a18-08fa-41cb-93bd-8be3e3a0fa1e', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1569.510527] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Creating folder: Project (87d56d1936b84c7080824ab21b1c05e9). Parent ref: group-v368536. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1569.510859] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-351852cd-9311-47c0-bf34-8bd8576316d0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.524839] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Created folder: Project (87d56d1936b84c7080824ab21b1c05e9) in parent group-v368536. [ 1569.525192] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Creating folder: Instances. Parent ref: group-v368726. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1569.525446] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1eae89dc-31f3-45c7-ab72-366effb1e8fa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.539523] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Created folder: Instances in parent group-v368726. [ 1569.539854] env[62508]: DEBUG oslo.service.loopingcall [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1569.540131] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1569.540415] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-faa1aab3-f805-4a7d-8313-6d3157b05530 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.570258] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1569.570258] env[62508]: value = "task-1776066" [ 1569.570258] env[62508]: _type = "Task" [ 1569.570258] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.582202] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776066, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.615196] env[62508]: DEBUG oslo_concurrency.lockutils [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "refresh_cache-80a9e17e-4095-498c-80c8-200bfb4f3d1f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.615706] env[62508]: DEBUG oslo_concurrency.lockutils [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "refresh_cache-80a9e17e-4095-498c-80c8-200bfb4f3d1f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.615706] env[62508]: DEBUG nova.network.neutron [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1569.699206] env[62508]: DEBUG nova.compute.manager [req-d247e4dd-905c-4b82-95e9-b771f483daba req-6dc2cd13-4e0e-44c7-afa7-40bbff52ab54 service nova] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Received event network-changed-46b30a18-08fa-41cb-93bd-8be3e3a0fa1e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1569.699445] env[62508]: DEBUG nova.compute.manager [req-d247e4dd-905c-4b82-95e9-b771f483daba req-6dc2cd13-4e0e-44c7-afa7-40bbff52ab54 service nova] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Refreshing instance network info cache due to event network-changed-46b30a18-08fa-41cb-93bd-8be3e3a0fa1e. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1569.699813] env[62508]: DEBUG oslo_concurrency.lockutils [req-d247e4dd-905c-4b82-95e9-b771f483daba req-6dc2cd13-4e0e-44c7-afa7-40bbff52ab54 service nova] Acquiring lock "refresh_cache-29223197-9a79-45cc-baa6-3deb731ec08e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.699933] env[62508]: DEBUG oslo_concurrency.lockutils [req-d247e4dd-905c-4b82-95e9-b771f483daba req-6dc2cd13-4e0e-44c7-afa7-40bbff52ab54 service nova] Acquired lock "refresh_cache-29223197-9a79-45cc-baa6-3deb731ec08e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.700129] env[62508]: DEBUG nova.network.neutron [req-d247e4dd-905c-4b82-95e9-b771f483daba req-6dc2cd13-4e0e-44c7-afa7-40bbff52ab54 service nova] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Refreshing network info cache for port 46b30a18-08fa-41cb-93bd-8be3e3a0fa1e {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1569.767579] env[62508]: DEBUG oslo_vmware.api [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776063, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.894648] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776058, 'name': CopyVirtualDisk_Task, 'duration_secs': 4.718634} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.895053] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 4bf92157-1d8c-4c3c-bc61-adb6d26bff54/4bf92157-1d8c-4c3c-bc61-adb6d26bff54.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1569.896039] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1569.896039] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb17d161-99e9-4566-a99f-bfe3c6683a97 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.903392] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Waiting for the task: (returnval){ [ 1569.903392] env[62508]: value = "task-1776067" [ 1569.903392] env[62508]: _type = "Task" [ 1569.903392] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.915677] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776067, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.960835] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306f72ff-c2e0-4afa-b9f4-ca040e1a4520 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.969692] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b490926c-96bf-4162-a63d-ca1dac650f12 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.003720] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd7702e-0f92-42a7-964b-03cfdcf15628 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.012188] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91914436-6f6d-48d9-b0ec-39c1d5dbe09d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.030409] env[62508]: DEBUG nova.compute.provider_tree [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1570.083936] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776066, 'name': CreateVM_Task, 'duration_secs': 0.484173} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.084451] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1570.085523] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1570.085686] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1570.086290] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1570.086860] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d9c6200-18c3-4db4-8448-27a5b1a0d549 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1570.093480] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Waiting for the task: (returnval){ [ 1570.093480] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5238b968-270e-d5ba-2081-2d8e22bbe99f" [ 1570.093480] env[62508]: _type = "Task" [ 1570.093480] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.102535] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5238b968-270e-d5ba-2081-2d8e22bbe99f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.272805] env[62508]: DEBUG oslo_vmware.api [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Task: {'id': task-1776063, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.530807} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.273380] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1570.273551] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1570.273661] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1570.273891] env[62508]: INFO nova.compute.manager [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Took 4.32 seconds to destroy the instance on the hypervisor. [ 1570.274203] env[62508]: DEBUG oslo.service.loopingcall [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1570.274415] env[62508]: DEBUG nova.compute.manager [-] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1570.274552] env[62508]: DEBUG nova.network.neutron [-] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1570.416064] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776067, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079338} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.417700] env[62508]: DEBUG nova.network.neutron [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Updating instance_info_cache with network_info: [{"id": "2fdf0128-ddf8-4030-a2be-bf738efcd699", "address": "fa:16:3e:70:a8:a5", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2fdf0128-dd", "ovs_interfaceid": "2fdf0128-ddf8-4030-a2be-bf738efcd699", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.419271] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1570.420837] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85d72696-0d21-4d93-affa-75e69a0576b2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.447164] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 
4bf92157-1d8c-4c3c-bc61-adb6d26bff54/4bf92157-1d8c-4c3c-bc61-adb6d26bff54.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1570.447836] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17f40605-b033-4ef8-a3c3-19d1b5d72cb5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.470599] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Waiting for the task: (returnval){ [ 1570.470599] env[62508]: value = "task-1776068" [ 1570.470599] env[62508]: _type = "Task" [ 1570.470599] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.483979] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776068, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.533344] env[62508]: DEBUG nova.scheduler.client.report [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1570.607402] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5238b968-270e-d5ba-2081-2d8e22bbe99f, 'name': SearchDatastore_Task, 'duration_secs': 0.011917} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.607831] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1570.608109] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1570.608497] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1570.608688] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1570.608914] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1570.609255] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-853ed09a-636b-4b15-be32-707feecb7da8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.619240] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1570.619378] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1570.620225] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58f35cb2-448d-4fd7-89df-03023b67a9b8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.626242] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Waiting for the task: (returnval){ [ 1570.626242] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525a1537-2697-339d-f168-6834cdc846e0" [ 1570.626242] env[62508]: _type = "Task" [ 1570.626242] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.635188] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525a1537-2697-339d-f168-6834cdc846e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.636130] env[62508]: DEBUG nova.network.neutron [req-d247e4dd-905c-4b82-95e9-b771f483daba req-6dc2cd13-4e0e-44c7-afa7-40bbff52ab54 service nova] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Updated VIF entry in instance network info cache for port 46b30a18-08fa-41cb-93bd-8be3e3a0fa1e. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1570.636806] env[62508]: DEBUG nova.network.neutron [req-d247e4dd-905c-4b82-95e9-b771f483daba req-6dc2cd13-4e0e-44c7-afa7-40bbff52ab54 service nova] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Updating instance_info_cache with network_info: [{"id": "46b30a18-08fa-41cb-93bd-8be3e3a0fa1e", "address": "fa:16:3e:07:48:7b", "network": {"id": "28335ce1-23c8-4968-891b-cae3b9487fae", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-399922061-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87d56d1936b84c7080824ab21b1c05e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6db039c-542c-4544-a57d-ddcc6c1e8e45", "external-id": "nsx-vlan-transportzone-810", "segmentation_id": 810, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46b30a18-08", "ovs_interfaceid": "46b30a18-08fa-41cb-93bd-8be3e3a0fa1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.735237] env[62508]: DEBUG nova.compute.manager [req-03c51db4-82c3-49ac-9997-7159fadf036e req-617f9def-22ff-4764-99f1-62f88c4cf419 service nova] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Received event network-vif-deleted-f7f2eb4c-dab6-44e0-8f5e-7013ddd13683 {{(pid=62508) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1570.735598] env[62508]: INFO nova.compute.manager [req-03c51db4-82c3-49ac-9997-7159fadf036e req-617f9def-22ff-4764-99f1-62f88c4cf419 service nova] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Neutron deleted interface f7f2eb4c-dab6-44e0-8f5e-7013ddd13683; detaching it from the instance and deleting it from the info cache [ 1570.735811] env[62508]: DEBUG nova.network.neutron [req-03c51db4-82c3-49ac-9997-7159fadf036e req-617f9def-22ff-4764-99f1-62f88c4cf419 service nova] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.921329] env[62508]: DEBUG oslo_concurrency.lockutils [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "refresh_cache-80a9e17e-4095-498c-80c8-200bfb4f3d1f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1570.984524] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776068, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.039655] env[62508]: DEBUG oslo_concurrency.lockutils [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.938s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.042625] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 6.413s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.090935] env[62508]: INFO nova.scheduler.client.report [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Deleted allocations for instance 06baedda-2926-4ec8-a4f6-d62713f48a26 [ 1571.136654] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525a1537-2697-339d-f168-6834cdc846e0, 'name': SearchDatastore_Task, 'duration_secs': 0.010658} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.139764] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef37b773-c83e-45af-b385-3913a7cd0844 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.142619] env[62508]: DEBUG oslo_concurrency.lockutils [req-d247e4dd-905c-4b82-95e9-b771f483daba req-6dc2cd13-4e0e-44c7-afa7-40bbff52ab54 service nova] Releasing lock "refresh_cache-29223197-9a79-45cc-baa6-3deb731ec08e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.146960] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Waiting for the task: (returnval){ [ 1571.146960] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f1e64b-01f1-00ed-e685-f40b4722055e" [ 1571.146960] env[62508]: _type = "Task" [ 1571.146960] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.155678] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f1e64b-01f1-00ed-e685-f40b4722055e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.213868] env[62508]: DEBUG nova.network.neutron [-] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1571.238832] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a3cdee17-8faa-4633-b924-6f1902469759 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.249567] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae82b992-1508-4438-aa58-acd1dd78c89e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.289525] env[62508]: DEBUG nova.compute.manager [req-03c51db4-82c3-49ac-9997-7159fadf036e req-617f9def-22ff-4764-99f1-62f88c4cf419 service nova] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Detach interface failed, port_id=f7f2eb4c-dab6-44e0-8f5e-7013ddd13683, reason: Instance 868cf942-f348-488d-b00a-af4c8b5efda5 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1571.487506] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776068, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.599248] env[62508]: DEBUG oslo_concurrency.lockutils [None req-12b55cec-a24c-43ed-b651-97f8c58ed22f tempest-ListImageFiltersTestJSON-402242 tempest-ListImageFiltersTestJSON-402242-project-member] Lock "06baedda-2926-4ec8-a4f6-d62713f48a26" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.768s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.658878] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f1e64b-01f1-00ed-e685-f40b4722055e, 'name': SearchDatastore_Task, 'duration_secs': 0.010846} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.659180] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.659469] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 29223197-9a79-45cc-baa6-3deb731ec08e/29223197-9a79-45cc-baa6-3deb731ec08e.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1571.659753] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c7388e3-a615-43ea-b79e-181418b795f2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.667641] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Waiting for the task: (returnval){ [ 1571.667641] env[62508]: value = "task-1776069" [ 1571.667641] env[62508]: _type = "Task" [ 1571.667641] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.675741] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': task-1776069, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.717238] env[62508]: INFO nova.compute.manager [-] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Took 1.44 seconds to deallocate network for instance. 
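The repeated "Waiting for the task ... to complete" / "_poll_task ... progress is N%" entries above reflect oslo.vmware's invoke-then-poll pattern: a vCenter task method is invoked through the session, then wait_for_task() polls its TaskInfo until it reaches a terminal state. A minimal, illustrative sketch of that pattern follows (not code from this log; the host, credentials, datastore paths and poll interval are placeholders):

    # Illustrative sketch of the invoke-then-poll pattern seen in these log
    # entries. Assumes oslo.vmware is installed and a reachable vCenter;
    # host/credentials/paths below are placeholders, not values from this log.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.test',        # placeholder vCenter host
        'administrator',          # placeholder username
        'secret',                 # placeholder password
        10,                       # api_retry_count
        0.5)                      # task_poll_interval in seconds

    # Start an asynchronous vCenter task (here: copying a cached image VMDK),
    # then block until it finishes. wait_for_task() polls the TaskInfo and
    # raises on failure, which is what produces the "progress is N%" lines.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] devstack-image-cache_base/image.vmdk',
        destName='[datastore1] some-instance-uuid/some-instance-uuid.vmdk')
    task_info = session.wait_for_task(task)
    print(task_info.state)  # 'success' once the copy has completed
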
[ 1571.986983] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776068, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.059301] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Applying migration context for instance 80a9e17e-4095-498c-80c8-200bfb4f3d1f as it has an incoming, in-progress migration b20f235e-fe4d-4b55-9bdd-2acccc8f2871. Migration status is migrating {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1572.062409] env[62508]: INFO nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Updating resource usage from migration b20f235e-fe4d-4b55-9bdd-2acccc8f2871 [ 1572.087367] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance de69dbf0-86f1-4b05-a9db-8b9afaabe49c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1572.087559] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 868cf942-f348-488d-b00a-af4c8b5efda5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1572.087691] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 95a289ac-3178-45ea-80d2-905b9af54f3c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1572.087811] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance e478855d-e9c7-4abc-8e22-a4b2eb0c7310 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1572.087928] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1572.088062] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance a10a4217-ae46-4f00-9ba1-cdf74f44ec7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1572.088183] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 70c8de27-4696-4005-bbec-e7a33e56311b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1572.088310] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance aedbd388-3ef7-410f-b0e3-5ea67ad56b65 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1572.088425] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance e07ab22e-bd07-4232-abfe-c0617c0b9813 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1572.088536] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 6afa4e73-64b4-4b10-b598-433f0c22ecb3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1572.088648] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance f307d4d5-e877-4d0a-951c-779c1d2e573b actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1572.088782] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1572.088900] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance a0245a18-638d-4c32-bea2-456408b5e001 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1572.089014] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance a8ce13c4-ea95-4343-8eab-8a0dafbf0e03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1572.089132] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance deee2c81-4d2c-47d3-aae6-ef829d59c644 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1572.089239] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 68d64a06-f752-459c-a152-157893e79bfd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1572.089346] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 4d24bacc-48c4-4649-bb29-fcae2cf77782 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1572.089452] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 4bf92157-1d8c-4c3c-bc61-adb6d26bff54 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1572.089559] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 29223197-9a79-45cc-baa6-3deb731ec08e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1572.089664] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Migration b20f235e-fe4d-4b55-9bdd-2acccc8f2871 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1572.089768] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 80a9e17e-4095-498c-80c8-200bfb4f3d1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1572.180284] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': task-1776069, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.225854] env[62508]: DEBUG oslo_concurrency.lockutils [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.438570] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9e0409-a555-4bc3-8bc3-ae635f81ec7c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.458585] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Updating instance '80a9e17e-4095-498c-80c8-200bfb4f3d1f' progress to 0 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1572.485858] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776068, 'name': ReconfigVM_Task, 'duration_secs': 1.538609} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.486250] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 4bf92157-1d8c-4c3c-bc61-adb6d26bff54/4bf92157-1d8c-4c3c-bc61-adb6d26bff54.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1572.487035] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4b8f7582-c611-497e-9a27-89b92ef0b326 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.495877] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Waiting for the task: (returnval){ [ 1572.495877] env[62508]: value = "task-1776070" [ 1572.495877] env[62508]: _type = "Task" [ 1572.495877] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.509840] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776070, 'name': Rename_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.593159] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 145306d7-f0e8-46c0-b2ab-1c41c208f976 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1572.679576] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': task-1776069, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.592367} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.679918] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 29223197-9a79-45cc-baa6-3deb731ec08e/29223197-9a79-45cc-baa6-3deb731ec08e.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1572.680204] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1572.680523] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffb7108b-dfc6-48cc-a01b-261b62224d9e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.688565] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Waiting for the task: (returnval){ [ 1572.688565] env[62508]: value = "task-1776071" [ 1572.688565] env[62508]: _type = "Task" [ 1572.688565] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.699977] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': task-1776071, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.707593] env[62508]: DEBUG oslo_vmware.rw_handles [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc05f1-b19a-5f58-53de-8e1e1e162544/disk-0.vmdk. 
{{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1572.708453] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e7e12c-5b24-44d1-b671-cac9b056b37a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.715038] env[62508]: DEBUG oslo_vmware.rw_handles [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc05f1-b19a-5f58-53de-8e1e1e162544/disk-0.vmdk is in state: ready. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1572.715038] env[62508]: ERROR oslo_vmware.rw_handles [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc05f1-b19a-5f58-53de-8e1e1e162544/disk-0.vmdk due to incomplete transfer. [ 1572.715038] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-14a87390-e98c-484d-b9ee-3a8f8b031099 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.722395] env[62508]: DEBUG oslo_vmware.rw_handles [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc05f1-b19a-5f58-53de-8e1e1e162544/disk-0.vmdk. {{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1572.722619] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Uploaded image cce07762-d034-41a0-9778-c0b0dab30ae0 to the Glance image server {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1572.724941] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Destroying the VM {{(pid=62508) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1572.725286] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cdcf7729-cdbd-46c4-8c7d-004ee19a6832 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.732202] env[62508]: DEBUG oslo_vmware.api [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1572.732202] env[62508]: value = "task-1776072" [ 1572.732202] env[62508]: _type = "Task" [ 1572.732202] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.741229] env[62508]: DEBUG oslo_vmware.api [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776072, 'name': Destroy_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.965015] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1572.965527] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-59878d8a-8130-46dc-8e20-9a2a8f12c807 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.975460] env[62508]: DEBUG oslo_vmware.api [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1572.975460] env[62508]: value = "task-1776073" [ 1572.975460] env[62508]: _type = "Task" [ 1572.975460] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.984736] env[62508]: DEBUG oslo_vmware.api [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776073, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.006679] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776070, 'name': Rename_Task, 'duration_secs': 0.171818} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.007056] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1573.007368] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-772b03bc-e5c2-4221-97f8-a5b8d95dde20 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.018056] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Waiting for the task: (returnval){ [ 1573.018056] env[62508]: value = "task-1776074" [ 1573.018056] env[62508]: _type = "Task" [ 1573.018056] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.026794] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776074, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.097083] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance a9b92a6d-154c-42bb-842c-bc42a07299a0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1573.097701] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 21 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1573.097701] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4672MB phys_disk=200GB used_disk=21GB total_vcpus=48 used_vcpus=21 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1573.202501] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': task-1776071, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.101598} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.205749] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1573.207025] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea7393c-ed27-4b1f-8173-427cc4b7860f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.232018] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 29223197-9a79-45cc-baa6-3deb731ec08e/29223197-9a79-45cc-baa6-3deb731ec08e.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1573.235202] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5d2be9c-0ed0-41b7-8b9b-df52d554c8f6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.261101] env[62508]: DEBUG oslo_vmware.api [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776072, 'name': Destroy_Task, 'duration_secs': 0.43619} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.264987] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Destroyed the VM [ 1573.265252] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Deleting Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1573.265616] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Waiting for the task: (returnval){ [ 1573.265616] env[62508]: value = "task-1776075" [ 1573.265616] env[62508]: _type = "Task" [ 1573.265616] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.265993] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-96833216-ae19-4811-b226-65783a12437d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.276394] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': task-1776075, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.280331] env[62508]: DEBUG oslo_vmware.api [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1573.280331] env[62508]: value = "task-1776076" [ 1573.280331] env[62508]: _type = "Task" [ 1573.280331] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.288691] env[62508]: DEBUG oslo_vmware.api [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776076, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.462601] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1d4e5b-336b-40d3-82f8-2f4ffe1f53e2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.471707] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd315d9-316b-4249-9e65-b8584c931958 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.512091] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c3136e-4f1c-4f91-a990-7ccbf218c255 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.519030] env[62508]: DEBUG oslo_vmware.api [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776073, 'name': PowerOffVM_Task, 'duration_secs': 0.246981} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.523343] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1573.523547] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Updating instance '80a9e17e-4095-498c-80c8-200bfb4f3d1f' progress to 17 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1573.533282] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe2198b-debd-49ca-a913-f9b842112eeb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.542916] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776074, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.556063] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1573.779245] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': task-1776075, 'name': ReconfigVM_Task, 'duration_secs': 0.372641} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.779591] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 29223197-9a79-45cc-baa6-3deb731ec08e/29223197-9a79-45cc-baa6-3deb731ec08e.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1573.780370] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17ee135d-92ff-4e6b-9bea-6adc543eef88 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.790757] env[62508]: DEBUG oslo_vmware.api [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776076, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.792146] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Waiting for the task: (returnval){ [ 1573.792146] env[62508]: value = "task-1776077" [ 1573.792146] env[62508]: _type = "Task" [ 1573.792146] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.805500] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': task-1776077, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.029284] env[62508]: DEBUG oslo_vmware.api [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776074, 'name': PowerOnVM_Task, 'duration_secs': 0.536771} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.029583] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1574.029804] env[62508]: INFO nova.compute.manager [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Took 13.90 seconds to spawn the instance on the hypervisor. 
[ 1574.029995] env[62508]: DEBUG nova.compute.manager [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1574.030864] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64676f3-7968-4194-af9d-d90bc67f16f8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.035773] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1574.035773] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1574.035938] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1574.036125] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1574.036371] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1574.036429] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1574.038027] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1574.038027] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1574.038027] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1574.038027] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1574.038027] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1574.042228] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc646767-db0b-42c1-8f35-6756c7009fa9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.059588] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "14c911d6-44c2-4c56-a027-3d25a1e58bcc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.059910] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "14c911d6-44c2-4c56-a027-3d25a1e58bcc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.061579] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1574.069418] env[62508]: DEBUG oslo_vmware.api [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1574.069418] env[62508]: value = "task-1776078" [ 
1574.069418] env[62508]: _type = "Task" [ 1574.069418] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.078535] env[62508]: DEBUG oslo_vmware.api [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776078, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.292477] env[62508]: DEBUG oslo_vmware.api [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776076, 'name': RemoveSnapshot_Task, 'duration_secs': 0.942525} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.292837] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Deleted Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1574.293097] env[62508]: INFO nova.compute.manager [None req-6653442d-97ab-4547-b3b6-ef3788f14650 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Took 15.25 seconds to snapshot the instance on the hypervisor. [ 1574.304377] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': task-1776077, 'name': Rename_Task, 'duration_secs': 0.149238} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.304695] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1574.304955] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4a0258c-2466-42fd-b5d0-aa9233e5a6ea {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.311720] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Waiting for the task: (returnval){ [ 1574.311720] env[62508]: value = "task-1776079" [ 1574.311720] env[62508]: _type = "Task" [ 1574.311720] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.323361] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': task-1776079, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.569667] env[62508]: DEBUG nova.compute.manager [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1574.573287] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1574.573488] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.531s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.578012] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.100s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.579674] env[62508]: INFO nova.compute.claims [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1574.587760] env[62508]: INFO nova.compute.manager [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Took 33.36 seconds to build instance. [ 1574.595262] env[62508]: DEBUG oslo_vmware.api [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776078, 'name': ReconfigVM_Task, 'duration_secs': 0.269698} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.595578] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Updating instance '80a9e17e-4095-498c-80c8-200bfb4f3d1f' progress to 33 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1574.822900] env[62508]: DEBUG oslo_vmware.api [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': task-1776079, 'name': PowerOnVM_Task, 'duration_secs': 0.490922} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.823194] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1574.823401] env[62508]: INFO nova.compute.manager [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Took 8.23 seconds to spawn the instance on the hypervisor. [ 1574.823618] env[62508]: DEBUG nova.compute.manager [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1574.824423] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198694f4-cd50-4ff8-86b0-88a7d78ef1ea {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.092303] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1f08a2a9-50af-4fa6-adf1-7a554c736f07 tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Lock "4bf92157-1d8c-4c3c-bc61-adb6d26bff54" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.878s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.094737] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.101785] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1575.102700] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1575.102882] env[62508]: DEBUG 
nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1575.103106] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1575.103266] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1575.103417] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1575.103622] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1575.103786] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1575.103954] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1575.104151] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1575.104331] env[62508]: DEBUG nova.virt.hardware [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1575.110219] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Reconfiguring VM instance instance-0000003f to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1575.111014] env[62508]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-580bae6c-ae58-4d7d-aff0-68c428b04f9b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.134147] env[62508]: DEBUG oslo_vmware.api [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1575.134147] env[62508]: value = "task-1776080" [ 1575.134147] env[62508]: _type = "Task" [ 1575.134147] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.144049] env[62508]: DEBUG oslo_vmware.api [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776080, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.342020] env[62508]: INFO nova.compute.manager [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Took 28.95 seconds to build instance. [ 1575.647077] env[62508]: DEBUG oslo_vmware.api [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776080, 'name': ReconfigVM_Task, 'duration_secs': 0.300741} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.647394] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Reconfigured VM instance instance-0000003f to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1575.648209] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d492ba90-89d0-41c1-aa35-15cc52fd3a1d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.673558] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 80a9e17e-4095-498c-80c8-200bfb4f3d1f/80a9e17e-4095-498c-80c8-200bfb4f3d1f.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1575.676505] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c6b08dd-2490-4202-aa76-d7e57900cf3c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.697334] env[62508]: DEBUG oslo_vmware.api [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1575.697334] env[62508]: value = "task-1776081" [ 1575.697334] env[62508]: _type = "Task" [ 
1575.697334] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.708350] env[62508]: DEBUG oslo_vmware.api [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776081, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.739691] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6da05f25-0df0-43d2-aef2-032e77bd7bc0 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Acquiring lock "interface-29223197-9a79-45cc-baa6-3deb731ec08e-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.739950] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6da05f25-0df0-43d2-aef2-032e77bd7bc0 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Lock "interface-29223197-9a79-45cc-baa6-3deb731ec08e-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.740315] env[62508]: DEBUG nova.objects.instance [None req-6da05f25-0df0-43d2-aef2-032e77bd7bc0 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Lazy-loading 'flavor' on Instance uuid 29223197-9a79-45cc-baa6-3deb731ec08e {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1575.843349] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24597c97-a368-4c57-a443-2a51f2a7c171 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Lock "29223197-9a79-45cc-baa6-3deb731ec08e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.463s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.925476] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0861cac-99bc-4cd0-9b16-f4aa949913d7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.929013] env[62508]: DEBUG oslo_concurrency.lockutils [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Acquiring lock "4bf92157-1d8c-4c3c-bc61-adb6d26bff54" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.929342] env[62508]: DEBUG oslo_concurrency.lockutils [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Lock "4bf92157-1d8c-4c3c-bc61-adb6d26bff54" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.929614] env[62508]: DEBUG 
oslo_concurrency.lockutils [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Acquiring lock "4bf92157-1d8c-4c3c-bc61-adb6d26bff54-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.929850] env[62508]: DEBUG oslo_concurrency.lockutils [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Lock "4bf92157-1d8c-4c3c-bc61-adb6d26bff54-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.930086] env[62508]: DEBUG oslo_concurrency.lockutils [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Lock "4bf92157-1d8c-4c3c-bc61-adb6d26bff54-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.932662] env[62508]: INFO nova.compute.manager [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Terminating instance [ 1575.936611] env[62508]: DEBUG nova.compute.manager [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1575.936833] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1575.937918] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab547bb0-d072-4856-bcd0-dd728f56a883 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.942047] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25153217-aef1-450a-be7f-3b992e8b974b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.949665] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1575.950135] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5983adbd-8cf6-4141-bcf5-3fcee83d0930 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.983518] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24fd327c-eb4e-4530-ae20-c4d0799d0e27 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.986558] env[62508]: DEBUG oslo_vmware.api [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Waiting for the task: (returnval){ [ 1575.986558] env[62508]: value = "task-1776082" [ 1575.986558] env[62508]: _type = "Task" [ 1575.986558] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.995049] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f09bcf-e19e-4c0a-a1a1-868b564f545c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.001391] env[62508]: DEBUG oslo_vmware.api [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776082, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.012620] env[62508]: DEBUG nova.compute.provider_tree [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1576.207389] env[62508]: DEBUG oslo_vmware.api [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776081, 'name': ReconfigVM_Task, 'duration_secs': 0.482746} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.207660] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 80a9e17e-4095-498c-80c8-200bfb4f3d1f/80a9e17e-4095-498c-80c8-200bfb4f3d1f.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1576.207944] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Updating instance '80a9e17e-4095-498c-80c8-200bfb4f3d1f' progress to 50 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1576.244487] env[62508]: DEBUG nova.objects.instance [None req-6da05f25-0df0-43d2-aef2-032e77bd7bc0 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Lazy-loading 'pci_requests' on Instance uuid 29223197-9a79-45cc-baa6-3deb731ec08e {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1576.496671] env[62508]: DEBUG oslo_vmware.api [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776082, 'name': PowerOffVM_Task, 'duration_secs': 0.201933} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.496992] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1576.497188] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1576.497463] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9cfaa0c9-caf1-4bdd-a68e-0cd8d0ee3c0d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.516318] env[62508]: DEBUG nova.scheduler.client.report [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1576.575357] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1576.575591] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1576.575777] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Deleting the datastore file [datastore1] 4bf92157-1d8c-4c3c-bc61-adb6d26bff54 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1576.576104] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f85a9890-2b8f-47d9-ab0f-10203bb43abe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.582804] env[62508]: DEBUG oslo_vmware.api [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Waiting for the task: (returnval){ [ 1576.582804] env[62508]: value = 
"task-1776084" [ 1576.582804] env[62508]: _type = "Task" [ 1576.582804] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.591474] env[62508]: DEBUG oslo_vmware.api [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776084, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.715235] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb481490-9276-4c82-84be-749d87df7cc3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.735211] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b369b4-ed1c-465e-902f-06349593ca2c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.756241] env[62508]: DEBUG nova.objects.base [None req-6da05f25-0df0-43d2-aef2-032e77bd7bc0 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Object Instance<29223197-9a79-45cc-baa6-3deb731ec08e> lazy-loaded attributes: flavor,pci_requests {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1576.756458] env[62508]: DEBUG nova.network.neutron [None req-6da05f25-0df0-43d2-aef2-032e77bd7bc0 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1576.758532] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Updating instance '80a9e17e-4095-498c-80c8-200bfb4f3d1f' progress to 67 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1576.929802] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6da05f25-0df0-43d2-aef2-032e77bd7bc0 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Lock "interface-29223197-9a79-45cc-baa6-3deb731ec08e-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.190s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1576.957679] env[62508]: DEBUG oslo_vmware.rw_handles [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524dd2f3-2f1a-6486-f546-5805c0abfde3/disk-0.vmdk. 
{{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1576.958878] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c3f8f3-b0b0-4e2d-a8fd-e24c9cebee74 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.965463] env[62508]: DEBUG oslo_vmware.rw_handles [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524dd2f3-2f1a-6486-f546-5805c0abfde3/disk-0.vmdk is in state: ready. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1576.965697] env[62508]: ERROR oslo_vmware.rw_handles [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524dd2f3-2f1a-6486-f546-5805c0abfde3/disk-0.vmdk due to incomplete transfer. [ 1576.965874] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-21f8ee81-6831-4239-81b9-c6a45f96dad4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.973921] env[62508]: DEBUG oslo_vmware.rw_handles [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524dd2f3-2f1a-6486-f546-5805c0abfde3/disk-0.vmdk. {{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1576.974133] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Uploaded image d17b0134-9e41-4671-a49a-a34df9610594 to the Glance image server {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1576.975834] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Destroying the VM {{(pid=62508) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1576.976350] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7e606d0b-4bde-43f1-a327-95a60dba2c42 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.983682] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1576.983682] env[62508]: value = "task-1776085" [ 1576.983682] env[62508]: _type = "Task" [ 1576.983682] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.991640] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776085, 'name': Destroy_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.021357] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.443s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.021893] env[62508]: DEBUG nova.compute.manager [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1577.024755] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.061s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.026236] env[62508]: INFO nova.compute.claims [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1577.094032] env[62508]: DEBUG oslo_vmware.api [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Task: {'id': task-1776084, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.436086} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.094032] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1577.094032] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1577.094207] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1577.094255] env[62508]: INFO nova.compute.manager [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1577.094497] env[62508]: DEBUG oslo.service.loopingcall [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1577.094743] env[62508]: DEBUG nova.compute.manager [-] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1577.094850] env[62508]: DEBUG nova.network.neutron [-] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1577.351121] env[62508]: DEBUG nova.network.neutron [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Port 2fdf0128-ddf8-4030-a2be-bf738efcd699 binding to destination host cpu-1 is already ACTIVE {{(pid=62508) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1577.390812] env[62508]: DEBUG nova.compute.manager [req-472b6fb5-1bbe-408c-91ea-85a7d0ecb14a req-71ee1a4c-46fa-4f90-9402-91e4c7dd5c72 service nova] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Received event network-vif-deleted-907829cf-4eb7-49fb-92b0-0135b138a80a {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1577.391015] env[62508]: INFO nova.compute.manager [req-472b6fb5-1bbe-408c-91ea-85a7d0ecb14a req-71ee1a4c-46fa-4f90-9402-91e4c7dd5c72 service nova] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Neutron deleted interface 907829cf-4eb7-49fb-92b0-0135b138a80a; detaching it from the instance and deleting it from the info cache [ 1577.391223] env[62508]: DEBUG nova.network.neutron [req-472b6fb5-1bbe-408c-91ea-85a7d0ecb14a req-71ee1a4c-46fa-4f90-9402-91e4c7dd5c72 service nova] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1577.493677] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776085, 'name': Destroy_Task, 'duration_secs': 0.34981} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.493885] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Destroyed the VM [ 1577.494148] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Deleting Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1577.494393] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-710e08cb-db4a-41f7-a008-9f1d6707e50a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.500420] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1577.500420] env[62508]: value = "task-1776086" [ 1577.500420] env[62508]: _type = "Task" [ 1577.500420] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.509233] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776086, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.531991] env[62508]: DEBUG nova.compute.utils [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1577.535038] env[62508]: DEBUG nova.compute.manager [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1577.535186] env[62508]: DEBUG nova.network.neutron [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1577.577327] env[62508]: DEBUG nova.policy [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '285fedd2e7fd4d259ca7fc57c3fcbf46', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74c45615efbb425fbec8400f6d225892', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1577.847379] env[62508]: DEBUG nova.network.neutron [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Successfully created port: ec2a5bd1-b682-40fe-825d-7029eb22f70e {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1577.871402] env[62508]: DEBUG nova.network.neutron [-] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1577.893999] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d3799c76-e95c-43ce-8ca1-4af0f16a8f09 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.904309] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114ce120-bf78-4ef0-9c49-67f83b5ac12a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.945724] env[62508]: DEBUG nova.compute.manager [req-472b6fb5-1bbe-408c-91ea-85a7d0ecb14a req-71ee1a4c-46fa-4f90-9402-91e4c7dd5c72 service nova] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Detach interface failed, port_id=907829cf-4eb7-49fb-92b0-0135b138a80a, reason: Instance 4bf92157-1d8c-4c3c-bc61-adb6d26bff54 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1578.010662] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776086, 'name': RemoveSnapshot_Task, 'duration_secs': 0.38163} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.010949] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Deleted Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1578.011240] env[62508]: DEBUG nova.compute.manager [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1578.012035] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c185a9-dd8c-435f-af5d-dd0b71bb452e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.035800] env[62508]: DEBUG nova.compute.manager [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1578.370901] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009a480c-0b24-444f-a3a3-627fe859cddc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.377031] env[62508]: INFO nova.compute.manager [-] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Took 1.28 seconds to deallocate network for instance. 
[ 1578.379811] env[62508]: DEBUG oslo_concurrency.lockutils [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "80a9e17e-4095-498c-80c8-200bfb4f3d1f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1578.380067] env[62508]: DEBUG oslo_concurrency.lockutils [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "80a9e17e-4095-498c-80c8-200bfb4f3d1f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1578.380974] env[62508]: DEBUG oslo_concurrency.lockutils [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "80a9e17e-4095-498c-80c8-200bfb4f3d1f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1578.389346] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db2fa93-b177-4405-afeb-dc093dbb33ae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.425035] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19dc9884-b952-410e-8df9-f57383aa3a08 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.432754] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a36e8aa3-7125-478d-b386-aaac4c820eec {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.448990] env[62508]: DEBUG nova.compute.provider_tree [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1578.524543] env[62508]: INFO nova.compute.manager [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Shelve offloading [ 1578.526113] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1578.526359] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89f85cdc-629f-49b6-9247-532afa4117c9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.533749] 
env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1578.533749] env[62508]: value = "task-1776087" [ 1578.533749] env[62508]: _type = "Task" [ 1578.533749] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.547876] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] VM already powered off {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1578.548058] env[62508]: DEBUG nova.compute.manager [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1578.548937] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bccab23e-7b24-4bef-94b7-10e327b1fe7e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.554322] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1578.554487] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquired lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.554681] env[62508]: DEBUG nova.network.neutron [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1578.817313] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Acquiring lock "29223197-9a79-45cc-baa6-3deb731ec08e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1578.817595] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Lock "29223197-9a79-45cc-baa6-3deb731ec08e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1578.817803] env[62508]: DEBUG 
oslo_concurrency.lockutils [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Acquiring lock "29223197-9a79-45cc-baa6-3deb731ec08e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1578.818365] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Lock "29223197-9a79-45cc-baa6-3deb731ec08e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1578.818550] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Lock "29223197-9a79-45cc-baa6-3deb731ec08e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1578.820853] env[62508]: INFO nova.compute.manager [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Terminating instance [ 1578.823116] env[62508]: DEBUG nova.compute.manager [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1578.823325] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1578.824200] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e85ae7a5-7792-4230-a310-aec25539b615 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.831686] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1578.831941] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9682392d-a8a9-4433-a5d0-a40cbdf73f0e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.838315] env[62508]: DEBUG oslo_vmware.api [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Waiting for the task: (returnval){ [ 1578.838315] env[62508]: value = "task-1776088" [ 1578.838315] env[62508]: _type = "Task" [ 1578.838315] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.846520] env[62508]: DEBUG oslo_vmware.api [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': task-1776088, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.886914] env[62508]: DEBUG oslo_concurrency.lockutils [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1578.952878] env[62508]: DEBUG nova.scheduler.client.report [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1579.050335] env[62508]: DEBUG nova.compute.manager [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1579.074648] env[62508]: DEBUG nova.virt.hardware [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1579.074907] env[62508]: DEBUG nova.virt.hardware [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1579.075089] env[62508]: DEBUG nova.virt.hardware [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1579.075267] env[62508]: DEBUG nova.virt.hardware [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 
tempest-ServerActionsTestOtherA-1355844615-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1579.075415] env[62508]: DEBUG nova.virt.hardware [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1579.075602] env[62508]: DEBUG nova.virt.hardware [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1579.075805] env[62508]: DEBUG nova.virt.hardware [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1579.075958] env[62508]: DEBUG nova.virt.hardware [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1579.076138] env[62508]: DEBUG nova.virt.hardware [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1579.076311] env[62508]: DEBUG nova.virt.hardware [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1579.076476] env[62508]: DEBUG nova.virt.hardware [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1579.077381] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a4b555-a029-4aac-8a46-65f758f35216 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.089058] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e50a96-d7e1-4f85-a0f1-d8bd87e670b6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.348646] env[62508]: DEBUG oslo_vmware.api [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': task-1776088, 'name': PowerOffVM_Task, 'duration_secs': 0.323298} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.348907] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1579.349084] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1579.349368] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51ca02a1-0a61-44da-a584-fcf57ac2fd95 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.385537] env[62508]: DEBUG nova.network.neutron [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Updating instance_info_cache with network_info: [{"id": "bd3f776a-1eed-4e8d-b7f3-d958db372a2f", "address": "fa:16:3e:c0:6a:f0", "network": {"id": "c8fc62e6-749b-4f96-8d05-8664390ef76f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1342046586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9b1180071bc4cc2a419daac2f51e3f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d829efb7-e98e-4b67-bd03-b0888287dbfd", "external-id": "nsx-vlan-transportzone-128", "segmentation_id": 128, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd3f776a-1e", "ovs_interfaceid": "bd3f776a-1eed-4e8d-b7f3-d958db372a2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1579.447364] env[62508]: DEBUG nova.compute.manager [req-5579c9ed-0fb5-4a07-b4d9-1103affce1f8 req-6139b72d-e7a4-415f-bb44-855fb906363a service nova] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Received event network-vif-plugged-ec2a5bd1-b682-40fe-825d-7029eb22f70e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1579.448055] env[62508]: DEBUG oslo_concurrency.lockutils [req-5579c9ed-0fb5-4a07-b4d9-1103affce1f8 req-6139b72d-e7a4-415f-bb44-855fb906363a service nova] Acquiring lock "145306d7-f0e8-46c0-b2ab-1c41c208f976-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1579.448284] env[62508]: DEBUG oslo_concurrency.lockutils [req-5579c9ed-0fb5-4a07-b4d9-1103affce1f8 
req-6139b72d-e7a4-415f-bb44-855fb906363a service nova] Lock "145306d7-f0e8-46c0-b2ab-1c41c208f976-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.448457] env[62508]: DEBUG oslo_concurrency.lockutils [req-5579c9ed-0fb5-4a07-b4d9-1103affce1f8 req-6139b72d-e7a4-415f-bb44-855fb906363a service nova] Lock "145306d7-f0e8-46c0-b2ab-1c41c208f976-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.448652] env[62508]: DEBUG nova.compute.manager [req-5579c9ed-0fb5-4a07-b4d9-1103affce1f8 req-6139b72d-e7a4-415f-bb44-855fb906363a service nova] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] No waiting events found dispatching network-vif-plugged-ec2a5bd1-b682-40fe-825d-7029eb22f70e {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1579.448904] env[62508]: WARNING nova.compute.manager [req-5579c9ed-0fb5-4a07-b4d9-1103affce1f8 req-6139b72d-e7a4-415f-bb44-855fb906363a service nova] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Received unexpected event network-vif-plugged-ec2a5bd1-b682-40fe-825d-7029eb22f70e for instance with vm_state building and task_state spawning. [ 1579.449745] env[62508]: DEBUG oslo_concurrency.lockutils [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "refresh_cache-80a9e17e-4095-498c-80c8-200bfb4f3d1f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1579.449905] env[62508]: DEBUG oslo_concurrency.lockutils [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "refresh_cache-80a9e17e-4095-498c-80c8-200bfb4f3d1f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1579.450082] env[62508]: DEBUG nova.network.neutron [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1579.457377] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.433s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.457837] env[62508]: DEBUG nova.compute.manager [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1579.460595] env[62508]: DEBUG oslo_concurrency.lockutils [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.235s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.460809] env[62508]: DEBUG nova.objects.instance [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lazy-loading 'resources' on Instance uuid 868cf942-f348-488d-b00a-af4c8b5efda5 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1579.589112] env[62508]: DEBUG nova.network.neutron [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Successfully updated port: ec2a5bd1-b682-40fe-825d-7029eb22f70e {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1579.714505] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1579.714505] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1579.714699] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Deleting the datastore file [datastore1] 29223197-9a79-45cc-baa6-3deb731ec08e {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1579.714909] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b8a3fc0-db01-4ad9-9d22-8d74c1aa8d3a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.722232] env[62508]: DEBUG oslo_vmware.api [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Waiting for the task: (returnval){ [ 1579.722232] env[62508]: value = "task-1776090" [ 1579.722232] env[62508]: _type = "Task" [ 1579.722232] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.730849] env[62508]: DEBUG oslo_vmware.api [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': task-1776090, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.891402] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Releasing lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1579.963943] env[62508]: DEBUG nova.compute.utils [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1579.968316] env[62508]: DEBUG nova.compute.manager [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1579.968485] env[62508]: DEBUG nova.network.neutron [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1580.008751] env[62508]: DEBUG nova.policy [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '274b6e073c6e48b69d2734ca81a7c811', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1965e796bcbd44a1be5a9c1b50698c0d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1580.091205] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "refresh_cache-145306d7-f0e8-46c0-b2ab-1c41c208f976" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1580.091348] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired lock "refresh_cache-145306d7-f0e8-46c0-b2ab-1c41c208f976" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1580.091512] env[62508]: DEBUG nova.network.neutron [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1580.234349] env[62508]: DEBUG oslo_vmware.api [None 
req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Task: {'id': task-1776090, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174394} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.234845] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1580.234845] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1580.234997] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1580.235193] env[62508]: INFO nova.compute.manager [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Took 1.41 seconds to destroy the instance on the hypervisor. [ 1580.235704] env[62508]: DEBUG oslo.service.loopingcall [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1580.235704] env[62508]: DEBUG nova.compute.manager [-] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1580.235807] env[62508]: DEBUG nova.network.neutron [-] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1580.286757] env[62508]: DEBUG nova.network.neutron [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Updating instance_info_cache with network_info: [{"id": "2fdf0128-ddf8-4030-a2be-bf738efcd699", "address": "fa:16:3e:70:a8:a5", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2fdf0128-dd", "ovs_interfaceid": "2fdf0128-ddf8-4030-a2be-bf738efcd699", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1580.294221] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1580.295140] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d87c57bb-58e6-4b00-a76c-67ee3cd0689c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.303295] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1580.304468] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3f708c8d-c985-425c-bf82-27986ceee003 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.306825] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d00bd9d8-7237-4b91-baaf-3d7a256fb117 {{(pid=62508) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.316357] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-804bd6f5-757d-4071-a93e-26a1659857c0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.349959] env[62508]: DEBUG nova.network.neutron [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Successfully created port: a74e9898-5ccf-4408-a728-3868a07bff74 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1580.352063] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55748433-393c-4a1c-a030-e6796bdfb377 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.359449] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8baafc-e0c6-4c5c-922e-557f95c97d8e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.372331] env[62508]: DEBUG nova.compute.provider_tree [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1580.469416] env[62508]: DEBUG nova.compute.manager [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1580.536018] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1580.536268] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1580.536447] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Deleting the datastore file [datastore1] aedbd388-3ef7-410f-b0e3-5ea67ad56b65 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1580.536710] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8043162b-85f9-4df3-be67-738bdf07119b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.543744] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1580.543744] env[62508]: value = "task-1776092" [ 1580.543744] env[62508]: _type = "Task" [ 1580.543744] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.552350] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776092, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.635556] env[62508]: DEBUG nova.network.neutron [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1580.789882] env[62508]: DEBUG oslo_concurrency.lockutils [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "refresh_cache-80a9e17e-4095-498c-80c8-200bfb4f3d1f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1580.835306] env[62508]: DEBUG nova.network.neutron [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Updating instance_info_cache with network_info: [{"id": "ec2a5bd1-b682-40fe-825d-7029eb22f70e", "address": "fa:16:3e:8e:42:7e", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec2a5bd1-b6", "ovs_interfaceid": "ec2a5bd1-b682-40fe-825d-7029eb22f70e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1580.875435] env[62508]: DEBUG nova.scheduler.client.report [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1580.882198] env[62508]: DEBUG nova.compute.manager [req-d80da655-9ff3-4b9c-ac0d-1f4a43926a62 req-c67fe41d-0b27-4be9-abda-f09279221eae service nova] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Received event network-vif-deleted-46b30a18-08fa-41cb-93bd-8be3e3a0fa1e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1580.882410] env[62508]: INFO nova.compute.manager [req-d80da655-9ff3-4b9c-ac0d-1f4a43926a62 req-c67fe41d-0b27-4be9-abda-f09279221eae service nova] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Neutron deleted interface 46b30a18-08fa-41cb-93bd-8be3e3a0fa1e; detaching it from the instance and deleting it from the info cache [ 1580.882660] env[62508]: DEBUG nova.network.neutron 
[req-d80da655-9ff3-4b9c-ac0d-1f4a43926a62 req-c67fe41d-0b27-4be9-abda-f09279221eae service nova] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1581.053841] env[62508]: DEBUG oslo_vmware.api [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776092, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166471} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.054199] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1581.054285] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1581.054456] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1581.073272] env[62508]: INFO nova.scheduler.client.report [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Deleted allocations for instance aedbd388-3ef7-410f-b0e3-5ea67ad56b65 [ 1581.178123] env[62508]: DEBUG nova.network.neutron [-] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1581.313248] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a8fd39-30dc-4d1a-bf5a-80be9351b433 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.334096] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093e51ed-cf5c-481b-9268-73404854fe7f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.337447] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Releasing lock "refresh_cache-145306d7-f0e8-46c0-b2ab-1c41c208f976" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1581.337746] env[62508]: DEBUG nova.compute.manager [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Instance network_info: |[{"id": "ec2a5bd1-b682-40fe-825d-7029eb22f70e", 
"address": "fa:16:3e:8e:42:7e", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec2a5bd1-b6", "ovs_interfaceid": "ec2a5bd1-b682-40fe-825d-7029eb22f70e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1581.338147] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:42:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '304be4f7-4e36-4468-9ef4-e457341cef18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec2a5bd1-b682-40fe-825d-7029eb22f70e', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1581.345662] env[62508]: DEBUG oslo.service.loopingcall [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1581.348066] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1581.348429] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Updating instance '80a9e17e-4095-498c-80c8-200bfb4f3d1f' progress to 83 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1581.351856] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20ee1e6b-de49-4c3f-85e3-81543131bd94 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.371980] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1581.371980] env[62508]: value = "task-1776093" [ 1581.371980] env[62508]: _type = "Task" [ 1581.371980] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.380617] env[62508]: DEBUG oslo_concurrency.lockutils [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.920s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1581.382328] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776093, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.383034] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.288s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1581.384263] env[62508]: INFO nova.compute.claims [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1581.387183] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca2f8d08-42af-4a3a-94b7-8050d758b4d7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.397456] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3965f9e4-ad57-449e-b00b-2c43933331a4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.410016] env[62508]: INFO nova.scheduler.client.report [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Deleted allocations for instance 868cf942-f348-488d-b00a-af4c8b5efda5 [ 1581.438601] env[62508]: DEBUG nova.compute.manager [req-d80da655-9ff3-4b9c-ac0d-1f4a43926a62 req-c67fe41d-0b27-4be9-abda-f09279221eae service nova] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Detach interface failed, port_id=46b30a18-08fa-41cb-93bd-8be3e3a0fa1e, reason: Instance 29223197-9a79-45cc-baa6-3deb731ec08e could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1581.478664] env[62508]: DEBUG nova.compute.manager [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1581.483554] env[62508]: DEBUG nova.compute.manager [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Received event network-changed-ec2a5bd1-b682-40fe-825d-7029eb22f70e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1581.483750] env[62508]: DEBUG nova.compute.manager [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Refreshing instance network info cache due to event network-changed-ec2a5bd1-b682-40fe-825d-7029eb22f70e. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1581.483966] env[62508]: DEBUG oslo_concurrency.lockutils [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] Acquiring lock "refresh_cache-145306d7-f0e8-46c0-b2ab-1c41c208f976" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.484126] env[62508]: DEBUG oslo_concurrency.lockutils [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] Acquired lock "refresh_cache-145306d7-f0e8-46c0-b2ab-1c41c208f976" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.484290] env[62508]: DEBUG nova.network.neutron [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Refreshing network info cache for port ec2a5bd1-b682-40fe-825d-7029eb22f70e {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1581.508850] env[62508]: DEBUG nova.virt.hardware [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1581.509177] env[62508]: DEBUG nova.virt.hardware [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1581.509345] env[62508]: DEBUG nova.virt.hardware [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Image limits 0:0:0 {{(pid=62508) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1581.509529] env[62508]: DEBUG nova.virt.hardware [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1581.509676] env[62508]: DEBUG nova.virt.hardware [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1581.509825] env[62508]: DEBUG nova.virt.hardware [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1581.510042] env[62508]: DEBUG nova.virt.hardware [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1581.510208] env[62508]: DEBUG nova.virt.hardware [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1581.510379] env[62508]: DEBUG nova.virt.hardware [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1581.510541] env[62508]: DEBUG nova.virt.hardware [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1581.510714] env[62508]: DEBUG nova.virt.hardware [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1581.511942] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c18dff7-dffd-49eb-82c8-d5f096c2f93a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.520479] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b95d3d-a31d-4f26-96f6-e73de14fbfee {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.577603] env[62508]: DEBUG 
oslo_concurrency.lockutils [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1581.681141] env[62508]: INFO nova.compute.manager [-] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Took 1.45 seconds to deallocate network for instance. [ 1581.869063] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1581.869396] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-edf75c21-4b11-4691-ae18-6848379a6774 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.876824] env[62508]: DEBUG oslo_vmware.api [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1581.876824] env[62508]: value = "task-1776094" [ 1581.876824] env[62508]: _type = "Task" [ 1581.876824] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.884031] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776093, 'name': CreateVM_Task, 'duration_secs': 0.499614} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.884537] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1581.885374] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.885749] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.885992] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1581.889054] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b28069e-3c29-455b-a85f-9df4c85b8795 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.890592] env[62508]: DEBUG oslo_vmware.api [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776094, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.896140] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1581.896140] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525137be-6447-7f1a-ade8-1b49eca83757" [ 1581.896140] env[62508]: _type = "Task" [ 1581.896140] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.903902] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525137be-6447-7f1a-ade8-1b49eca83757, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.917459] env[62508]: DEBUG oslo_concurrency.lockutils [None req-56322b21-359c-4983-8406-3550b87ee34b tempest-MigrationsAdminTest-1752776070 tempest-MigrationsAdminTest-1752776070-project-member] Lock "868cf942-f348-488d-b00a-af4c8b5efda5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.975s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1582.126713] env[62508]: DEBUG nova.network.neutron [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Successfully updated port: a74e9898-5ccf-4408-a728-3868a07bff74 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1582.188046] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1582.285424] env[62508]: DEBUG nova.network.neutron [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Updated VIF entry in instance network info cache for port ec2a5bd1-b682-40fe-825d-7029eb22f70e. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1582.285936] env[62508]: DEBUG nova.network.neutron [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Updating instance_info_cache with network_info: [{"id": "ec2a5bd1-b682-40fe-825d-7029eb22f70e", "address": "fa:16:3e:8e:42:7e", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec2a5bd1-b6", "ovs_interfaceid": "ec2a5bd1-b682-40fe-825d-7029eb22f70e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1582.388287] env[62508]: DEBUG oslo_vmware.api [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776094, 'name': PowerOnVM_Task, 
'duration_secs': 0.417737} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.388600] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1582.388779] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-768f2574-aad3-4b7a-8b9e-2e2fcad095ad tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Updating instance '80a9e17e-4095-498c-80c8-200bfb4f3d1f' progress to 100 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1582.411491] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525137be-6447-7f1a-ade8-1b49eca83757, 'name': SearchDatastore_Task, 'duration_secs': 0.010253} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.412014] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1582.412410] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1582.412781] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1582.413066] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1582.413375] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1582.413757] env[62508]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-0d681ac7-ea54-47f6-849a-67a5a3e9a3b7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.427403] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1582.427961] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1582.428834] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23c7be5c-303a-44f2-9c65-0a7abf817e4e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.436029] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1582.436029] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521c541f-a6cb-a5f1-8b49-ee84d46b655c" [ 1582.436029] env[62508]: _type = "Task" [ 1582.436029] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.446195] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521c541f-a6cb-a5f1-8b49-ee84d46b655c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.629113] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "refresh_cache-a9b92a6d-154c-42bb-842c-bc42a07299a0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1582.629276] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquired lock "refresh_cache-a9b92a6d-154c-42bb-842c-bc42a07299a0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1582.629433] env[62508]: DEBUG nova.network.neutron [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1582.657427] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e1656bc-b92d-4094-8cbe-7ce68788c0f7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.664529] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07657a6d-71ca-4034-9326-33eb5d69105c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.694362] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-955a6357-d8f7-4cea-a564-1159f3517e42 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.701654] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e666f069-7057-4541-bd2f-e100b541f3c2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.714790] env[62508]: DEBUG nova.compute.provider_tree [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1582.788360] env[62508]: DEBUG oslo_concurrency.lockutils [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] Releasing lock "refresh_cache-145306d7-f0e8-46c0-b2ab-1c41c208f976" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1582.788624] env[62508]: DEBUG nova.compute.manager [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Received event network-vif-unplugged-bd3f776a-1eed-4e8d-b7f3-d958db372a2f {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1582.788821] env[62508]: DEBUG oslo_concurrency.lockutils [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 
req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] Acquiring lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1582.789032] env[62508]: DEBUG oslo_concurrency.lockutils [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] Lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1582.789199] env[62508]: DEBUG oslo_concurrency.lockutils [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] Lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1582.789364] env[62508]: DEBUG nova.compute.manager [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] No waiting events found dispatching network-vif-unplugged-bd3f776a-1eed-4e8d-b7f3-d958db372a2f {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1582.789537] env[62508]: WARNING nova.compute.manager [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Received unexpected event network-vif-unplugged-bd3f776a-1eed-4e8d-b7f3-d958db372a2f for instance with vm_state shelved_offloaded and task_state None. [ 1582.789700] env[62508]: DEBUG nova.compute.manager [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Received event network-changed-bd3f776a-1eed-4e8d-b7f3-d958db372a2f {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1582.790644] env[62508]: DEBUG nova.compute.manager [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Refreshing instance network info cache due to event network-changed-bd3f776a-1eed-4e8d-b7f3-d958db372a2f. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1582.790644] env[62508]: DEBUG oslo_concurrency.lockutils [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] Acquiring lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1582.790644] env[62508]: DEBUG oslo_concurrency.lockutils [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] Acquired lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1582.790644] env[62508]: DEBUG nova.network.neutron [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Refreshing network info cache for port bd3f776a-1eed-4e8d-b7f3-d958db372a2f {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1582.948990] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521c541f-a6cb-a5f1-8b49-ee84d46b655c, 'name': SearchDatastore_Task, 'duration_secs': 0.015084} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.949837] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d610dd3-b994-4ec0-a075-1386a4c55de0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.955698] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1582.955698] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529b17bd-ef52-66b6-e454-f3b7afd0c227" [ 1582.955698] env[62508]: _type = "Task" [ 1582.955698] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.963430] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529b17bd-ef52-66b6-e454-f3b7afd0c227, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.121079] env[62508]: DEBUG nova.compute.manager [req-7066c687-ac3b-43ef-b4d6-2a9fea262af3 req-75e17a79-1dfc-43e7-b93f-3934a9cef050 service nova] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Received event network-vif-plugged-a74e9898-5ccf-4408-a728-3868a07bff74 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1583.121425] env[62508]: DEBUG oslo_concurrency.lockutils [req-7066c687-ac3b-43ef-b4d6-2a9fea262af3 req-75e17a79-1dfc-43e7-b93f-3934a9cef050 service nova] Acquiring lock "a9b92a6d-154c-42bb-842c-bc42a07299a0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1583.121823] env[62508]: DEBUG oslo_concurrency.lockutils [req-7066c687-ac3b-43ef-b4d6-2a9fea262af3 req-75e17a79-1dfc-43e7-b93f-3934a9cef050 service nova] Lock "a9b92a6d-154c-42bb-842c-bc42a07299a0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1583.121988] env[62508]: DEBUG oslo_concurrency.lockutils [req-7066c687-ac3b-43ef-b4d6-2a9fea262af3 req-75e17a79-1dfc-43e7-b93f-3934a9cef050 service nova] Lock "a9b92a6d-154c-42bb-842c-bc42a07299a0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1583.122284] env[62508]: DEBUG nova.compute.manager [req-7066c687-ac3b-43ef-b4d6-2a9fea262af3 req-75e17a79-1dfc-43e7-b93f-3934a9cef050 service nova] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] No waiting events found dispatching network-vif-plugged-a74e9898-5ccf-4408-a728-3868a07bff74 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1583.122512] env[62508]: WARNING nova.compute.manager [req-7066c687-ac3b-43ef-b4d6-2a9fea262af3 req-75e17a79-1dfc-43e7-b93f-3934a9cef050 service nova] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Received unexpected event network-vif-plugged-a74e9898-5ccf-4408-a728-3868a07bff74 for instance with vm_state building and task_state spawning. [ 1583.122765] env[62508]: DEBUG nova.compute.manager [req-7066c687-ac3b-43ef-b4d6-2a9fea262af3 req-75e17a79-1dfc-43e7-b93f-3934a9cef050 service nova] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Received event network-changed-a74e9898-5ccf-4408-a728-3868a07bff74 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1583.123015] env[62508]: DEBUG nova.compute.manager [req-7066c687-ac3b-43ef-b4d6-2a9fea262af3 req-75e17a79-1dfc-43e7-b93f-3934a9cef050 service nova] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Refreshing instance network info cache due to event network-changed-a74e9898-5ccf-4408-a728-3868a07bff74. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1583.123250] env[62508]: DEBUG oslo_concurrency.lockutils [req-7066c687-ac3b-43ef-b4d6-2a9fea262af3 req-75e17a79-1dfc-43e7-b93f-3934a9cef050 service nova] Acquiring lock "refresh_cache-a9b92a6d-154c-42bb-842c-bc42a07299a0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1583.167781] env[62508]: DEBUG nova.network.neutron [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1583.217738] env[62508]: DEBUG nova.scheduler.client.report [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1583.336256] env[62508]: DEBUG nova.network.neutron [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Updating instance_info_cache with network_info: [{"id": "a74e9898-5ccf-4408-a728-3868a07bff74", "address": "fa:16:3e:c2:db:cf", "network": {"id": "73c15752-ef12-4a06-b340-8848b11c86c2", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-836710224-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1965e796bcbd44a1be5a9c1b50698c0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa74e9898-5c", "ovs_interfaceid": "a74e9898-5ccf-4408-a728-3868a07bff74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1583.465332] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529b17bd-ef52-66b6-e454-f3b7afd0c227, 'name': SearchDatastore_Task, 'duration_secs': 0.457384} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.465609] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1583.465905] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 145306d7-f0e8-46c0-b2ab-1c41c208f976/145306d7-f0e8-46c0-b2ab-1c41c208f976.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1583.466184] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cbb94f15-c4c5-46d3-a322-0ff9b98fd307 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.472991] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1583.472991] env[62508]: value = "task-1776095" [ 1583.472991] env[62508]: _type = "Task" [ 1583.472991] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.481504] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776095, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.515791] env[62508]: DEBUG nova.network.neutron [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Updated VIF entry in instance network info cache for port bd3f776a-1eed-4e8d-b7f3-d958db372a2f. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1583.516200] env[62508]: DEBUG nova.network.neutron [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Updating instance_info_cache with network_info: [{"id": "bd3f776a-1eed-4e8d-b7f3-d958db372a2f", "address": "fa:16:3e:c0:6a:f0", "network": {"id": "c8fc62e6-749b-4f96-8d05-8664390ef76f", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1342046586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9b1180071bc4cc2a419daac2f51e3f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapbd3f776a-1e", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1583.519742] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1583.723282] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.340s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1583.723846] env[62508]: DEBUG nova.compute.manager [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1583.726733] env[62508]: DEBUG oslo_concurrency.lockutils [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.840s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1583.726903] env[62508]: DEBUG nova.objects.instance [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Lazy-loading 'resources' on Instance uuid 4bf92157-1d8c-4c3c-bc61-adb6d26bff54 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1583.839104] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Releasing lock "refresh_cache-a9b92a6d-154c-42bb-842c-bc42a07299a0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1583.839495] env[62508]: DEBUG nova.compute.manager [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Instance network_info: |[{"id": "a74e9898-5ccf-4408-a728-3868a07bff74", "address": "fa:16:3e:c2:db:cf", "network": {"id": "73c15752-ef12-4a06-b340-8848b11c86c2", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-836710224-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1965e796bcbd44a1be5a9c1b50698c0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa74e9898-5c", "ovs_interfaceid": "a74e9898-5ccf-4408-a728-3868a07bff74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1583.839820] env[62508]: DEBUG oslo_concurrency.lockutils [req-7066c687-ac3b-43ef-b4d6-2a9fea262af3 req-75e17a79-1dfc-43e7-b93f-3934a9cef050 service nova] Acquired lock "refresh_cache-a9b92a6d-154c-42bb-842c-bc42a07299a0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1583.840014] env[62508]: DEBUG nova.network.neutron [req-7066c687-ac3b-43ef-b4d6-2a9fea262af3 req-75e17a79-1dfc-43e7-b93f-3934a9cef050 service nova] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Refreshing network info cache for port a74e9898-5ccf-4408-a728-3868a07bff74 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1583.841312] 
env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:db:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a74e9898-5ccf-4408-a728-3868a07bff74', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1583.850173] env[62508]: DEBUG oslo.service.loopingcall [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1583.851426] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1583.851683] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27a74ea8-86a3-4469-8574-bc8c65b1be00 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.873077] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1583.873077] env[62508]: value = "task-1776096" [ 1583.873077] env[62508]: _type = "Task" [ 1583.873077] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.883909] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776096, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.982917] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776095, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.019210] env[62508]: DEBUG oslo_concurrency.lockutils [req-fd61de4b-396e-4f07-b18e-81d73b2f3c11 req-ed20e72b-de57-439f-9e90-47cc51d5ddd3 service nova] Releasing lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1584.230020] env[62508]: DEBUG nova.compute.utils [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1584.236716] env[62508]: DEBUG nova.compute.manager [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1584.236716] env[62508]: DEBUG nova.network.neutron [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1584.303599] env[62508]: DEBUG nova.policy [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '596ff35abb3949e9b3d3d9b80e6eae69', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '15a9d6b8eb4e44a7a3d7fa4abe0cd5bb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1584.383571] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776096, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.496987] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776095, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.596911] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57cdf77e-adc4-44fd-98e6-a956f991e790 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.608874] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ba82fb-5c9d-488f-9596-c7c8e345086a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.642223] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9353e4a5-6940-42d5-a7c3-82bd3da872f1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.651173] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a6a6477-f585-44ca-8c8e-73c5aec2fa58 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.668221] env[62508]: DEBUG nova.compute.provider_tree [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1584.673666] env[62508]: DEBUG nova.network.neutron [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Successfully created port: e32abf3b-06cd-43e5-a59b-616b6d7d3ff2 {{(pid=62508) 
_create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1584.736523] env[62508]: DEBUG nova.compute.manager [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1584.774959] env[62508]: DEBUG nova.network.neutron [req-7066c687-ac3b-43ef-b4d6-2a9fea262af3 req-75e17a79-1dfc-43e7-b93f-3934a9cef050 service nova] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Updated VIF entry in instance network info cache for port a74e9898-5ccf-4408-a728-3868a07bff74. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1584.775355] env[62508]: DEBUG nova.network.neutron [req-7066c687-ac3b-43ef-b4d6-2a9fea262af3 req-75e17a79-1dfc-43e7-b93f-3934a9cef050 service nova] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Updating instance_info_cache with network_info: [{"id": "a74e9898-5ccf-4408-a728-3868a07bff74", "address": "fa:16:3e:c2:db:cf", "network": {"id": "73c15752-ef12-4a06-b340-8848b11c86c2", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-836710224-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1965e796bcbd44a1be5a9c1b50698c0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa74e9898-5c", "ovs_interfaceid": "a74e9898-5ccf-4408-a728-3868a07bff74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1584.884773] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776096, 'name': CreateVM_Task, 'duration_secs': 0.528324} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.884913] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1584.885607] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1584.886182] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1584.886182] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1584.886378] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85b5b3e8-f336-44f5-9dc5-e4088ce3c272 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.891932] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1584.891932] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e13424-06b2-50b4-f02e-a2f64e65e77c" [ 1584.891932] env[62508]: _type = "Task" [ 1584.891932] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.900313] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "80a9e17e-4095-498c-80c8-200bfb4f3d1f" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.900550] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "80a9e17e-4095-498c-80c8-200bfb4f3d1f" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1584.900732] env[62508]: DEBUG nova.compute.manager [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Going to confirm migration 3 {{(pid=62508) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1584.902141] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e13424-06b2-50b4-f02e-a2f64e65e77c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.984797] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776095, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.096663} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.985155] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 145306d7-f0e8-46c0-b2ab-1c41c208f976/145306d7-f0e8-46c0-b2ab-1c41c208f976.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1584.985465] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1584.985783] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f0f44086-cab9-47eb-a0ef-c89b59353c1e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.993159] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1584.993159] env[62508]: value = "task-1776097" [ 1584.993159] env[62508]: _type = "Task" [ 1584.993159] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.000973] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776097, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.177180] env[62508]: DEBUG nova.scheduler.client.report [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1585.278546] env[62508]: DEBUG oslo_concurrency.lockutils [req-7066c687-ac3b-43ef-b4d6-2a9fea262af3 req-75e17a79-1dfc-43e7-b93f-3934a9cef050 service nova] Releasing lock "refresh_cache-a9b92a6d-154c-42bb-842c-bc42a07299a0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1585.402480] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e13424-06b2-50b4-f02e-a2f64e65e77c, 'name': SearchDatastore_Task, 'duration_secs': 0.009843} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.405150] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1585.405373] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1585.405598] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1585.406260] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1585.406260] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1585.407871] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f3697927-ec3b-4249-a0a2-96baa458beb5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.418464] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1585.418464] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1585.419100] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e7e2d51-942d-4300-81e9-a77a01255f53 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.424844] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1585.424844] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527c0a29-867f-19fc-73b8-50414d8691fd" [ 1585.424844] env[62508]: _type = "Task" [ 1585.424844] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.433957] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527c0a29-867f-19fc-73b8-50414d8691fd, 'name': SearchDatastore_Task} progress is 0%. 
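Editor's note: the image-cache records above follow the oslo.concurrency idiom: a lock named after the cached VMDK path is acquired before checking/creating the cache directory and searching the datastore, and released once the copy source is known. A minimal sketch of that locking pattern (the lock name is copied from the log, the body is illustrative):

```python
# Serialize per-image cache handling so only one request fetches or copies
# the cached VMDK at a time. "Acquiring/Acquired/Releasing lock" in the log
# correspond to entering and leaving this context manager.
from oslo_concurrency import lockutils

cache_vmdk = ('[datastore1] devstack-image-cache_base/'
              'f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/'
              'f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk')

def fetch_image_if_missing(prepare_cache):
    with lockutils.lock(cache_vmdk):
        prepare_cache()   # e.g. FileManager.MakeDirectory + SearchDatastore_Task
```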
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.476059] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "refresh_cache-80a9e17e-4095-498c-80c8-200bfb4f3d1f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1585.476252] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "refresh_cache-80a9e17e-4095-498c-80c8-200bfb4f3d1f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1585.476433] env[62508]: DEBUG nova.network.neutron [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1585.476618] env[62508]: DEBUG nova.objects.instance [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lazy-loading 'info_cache' on Instance uuid 80a9e17e-4095-498c-80c8-200bfb4f3d1f {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1585.503508] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776097, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070517} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.503842] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1585.504657] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60dd8dd-c2eb-4958-b3ef-5c30cd767e6a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.528678] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 145306d7-f0e8-46c0-b2ab-1c41c208f976/145306d7-f0e8-46c0-b2ab-1c41c208f976.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1585.529031] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6793fe55-3259-4621-9155-bca8f4761bfb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.551344] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1585.551344] env[62508]: value = "task-1776098" [ 1585.551344] env[62508]: _type = "Task" [ 1585.551344] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.560885] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776098, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.683181] env[62508]: DEBUG oslo_concurrency.lockutils [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.956s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1585.685720] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.108s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.685808] env[62508]: DEBUG nova.objects.instance [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lazy-loading 'resources' on Instance uuid aedbd388-3ef7-410f-b0e3-5ea67ad56b65 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1585.705866] env[62508]: INFO nova.scheduler.client.report [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Deleted allocations for instance 4bf92157-1d8c-4c3c-bc61-adb6d26bff54 [ 1585.746733] env[62508]: DEBUG nova.compute.manager [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1585.778535] env[62508]: DEBUG nova.virt.hardware [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='df87458fea3dfeaa56e3f4f7b543f690',container_format='bare',created_at=2024-12-11T22:13:36Z,direct_url=,disk_format='vmdk',id=cce07762-d034-41a0-9778-c0b0dab30ae0,min_disk=1,min_ram=0,name='tempest-test-snap-639071975',owner='15a9d6b8eb4e44a7a3d7fa4abe0cd5bb',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-12-11T22:13:51Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1585.778535] env[62508]: DEBUG nova.virt.hardware [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1585.778535] env[62508]: DEBUG nova.virt.hardware [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1585.778757] env[62508]: DEBUG nova.virt.hardware [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1585.778953] env[62508]: DEBUG nova.virt.hardware [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1585.779440] env[62508]: DEBUG nova.virt.hardware [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1585.779843] env[62508]: DEBUG nova.virt.hardware [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1585.780037] env[62508]: DEBUG nova.virt.hardware [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1585.780313] env[62508]: DEBUG nova.virt.hardware [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Got 1 
possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1585.780601] env[62508]: DEBUG nova.virt.hardware [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1585.780878] env[62508]: DEBUG nova.virt.hardware [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1585.781884] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d8bc68-f23c-4ea6-8fad-f111df362c59 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.791536] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b7ce77-d1eb-41b0-91d6-071eefb3db90 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.934948] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527c0a29-867f-19fc-73b8-50414d8691fd, 'name': SearchDatastore_Task, 'duration_secs': 0.079098} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.935818] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-800f26da-9467-4693-8150-58a8440fb115 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.941313] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1585.941313] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52652393-f282-78b7-ea7f-42efee382da7" [ 1585.941313] env[62508]: _type = "Task" [ 1585.941313] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.953964] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52652393-f282-78b7-ea7f-42efee382da7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.061989] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776098, 'name': ReconfigVM_Task, 'duration_secs': 0.44909} completed successfully. 
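Editor's note: the nova.virt.hardware lines above walk through CPU topology selection for the m1.nano flavor; with 1 vCPU and no flavor or image limits, the only possible topology is 1 socket x 1 core x 1 thread. A small self-contained sketch of that enumeration (simplified, not the actual nova.virt.hardware implementation):

```python
# Simplified version of "Build topologies for N vcpu(s)": enumerate
# (sockets, cores, threads) triples whose product equals the vCPU count,
# subject to upper limits.
import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            yield (sockets, cores, threads)

print(list(possible_topologies(1)))   # [(1, 1, 1)] -- "Got 1 possible topologies"
```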
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.061989] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 145306d7-f0e8-46c0-b2ab-1c41c208f976/145306d7-f0e8-46c0-b2ab-1c41c208f976.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1586.062653] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c0bade65-e89a-4d9c-8429-a0a254bd3d90 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.069961] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1586.069961] env[62508]: value = "task-1776099" [ 1586.069961] env[62508]: _type = "Task" [ 1586.069961] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.077660] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776099, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.114300] env[62508]: DEBUG nova.compute.manager [req-a13499ba-d3b4-45a8-839c-e5d2dfd6dc8e req-dba4adf5-90b7-4df5-ba8d-621c450bab26 service nova] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Received event network-vif-plugged-e32abf3b-06cd-43e5-a59b-616b6d7d3ff2 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1586.114579] env[62508]: DEBUG oslo_concurrency.lockutils [req-a13499ba-d3b4-45a8-839c-e5d2dfd6dc8e req-dba4adf5-90b7-4df5-ba8d-621c450bab26 service nova] Acquiring lock "14c911d6-44c2-4c56-a027-3d25a1e58bcc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.114900] env[62508]: DEBUG oslo_concurrency.lockutils [req-a13499ba-d3b4-45a8-839c-e5d2dfd6dc8e req-dba4adf5-90b7-4df5-ba8d-621c450bab26 service nova] Lock "14c911d6-44c2-4c56-a027-3d25a1e58bcc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1586.115277] env[62508]: DEBUG oslo_concurrency.lockutils [req-a13499ba-d3b4-45a8-839c-e5d2dfd6dc8e req-dba4adf5-90b7-4df5-ba8d-621c450bab26 service nova] Lock "14c911d6-44c2-4c56-a027-3d25a1e58bcc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.115537] env[62508]: DEBUG nova.compute.manager [req-a13499ba-d3b4-45a8-839c-e5d2dfd6dc8e req-dba4adf5-90b7-4df5-ba8d-621c450bab26 service nova] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] No waiting events found dispatching 
network-vif-plugged-e32abf3b-06cd-43e5-a59b-616b6d7d3ff2 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1586.115789] env[62508]: WARNING nova.compute.manager [req-a13499ba-d3b4-45a8-839c-e5d2dfd6dc8e req-dba4adf5-90b7-4df5-ba8d-621c450bab26 service nova] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Received unexpected event network-vif-plugged-e32abf3b-06cd-43e5-a59b-616b6d7d3ff2 for instance with vm_state building and task_state spawning. [ 1586.188618] env[62508]: DEBUG nova.objects.instance [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lazy-loading 'numa_topology' on Instance uuid aedbd388-3ef7-410f-b0e3-5ea67ad56b65 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1586.214209] env[62508]: DEBUG oslo_concurrency.lockutils [None req-394ff7c0-3762-4a30-a13e-3c9a9afe633d tempest-InstanceActionsV221TestJSON-931103043 tempest-InstanceActionsV221TestJSON-931103043-project-member] Lock "4bf92157-1d8c-4c3c-bc61-adb6d26bff54" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.285s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.296096] env[62508]: DEBUG nova.network.neutron [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Successfully updated port: e32abf3b-06cd-43e5-a59b-616b6d7d3ff2 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1586.454419] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52652393-f282-78b7-ea7f-42efee382da7, 'name': SearchDatastore_Task, 'duration_secs': 0.009051} completed successfully. 
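Editor's note: the compute-manager records above show the external-event handshake: Neutron reports network-vif-plugged-e32abf3b-..., the manager takes the per-instance "-events" lock, pops the matching waiter, and, because the instance is still building, logs the event as unexpected. The toy below models that pop_instance_event mechanism with plain threading primitives; it is an illustration, not nova's implementation.

```python
# Simplified model of the event handshake: the spawn path registers a waiter
# for "network-vif-plugged-<port-id>", the Neutron notification arrives via
# the external-events API, and pop_instance_event wakes the waiter.
import threading

class InstanceEvents:
    def __init__(self):
        self._waiters = {}            # event name -> threading.Event
        self._lock = threading.Lock() # mirrors the "-events" lock in the log

    def prepare(self, name):
        ev = threading.Event()
        with self._lock:
            self._waiters[name] = ev
        return ev

    def pop_instance_event(self, name):
        with self._lock:
            ev = self._waiters.pop(name, None)
        if ev is None:
            return False              # "Received unexpected event ..." case
        ev.set()
        return True

events = InstanceEvents()
waiter = events.prepare('network-vif-plugged-e32abf3b-06cd-43e5-a59b-616b6d7d3ff2')
events.pop_instance_event('network-vif-plugged-e32abf3b-06cd-43e5-a59b-616b6d7d3ff2')
waiter.wait(timeout=300)
```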
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.455664] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1586.455878] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a9b92a6d-154c-42bb-842c-bc42a07299a0/a9b92a6d-154c-42bb-842c-bc42a07299a0.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1586.457284] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5cd7e83e-9785-4edf-b59f-db3bbbf30e00 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.463804] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1586.463804] env[62508]: value = "task-1776100" [ 1586.463804] env[62508]: _type = "Task" [ 1586.463804] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.478054] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776100, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.580405] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776099, 'name': Rename_Task, 'duration_secs': 0.143786} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.580405] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1586.580951] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca1b11c9-2ec6-4e21-a2ca-de6f9ad20ae5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.587614] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1586.587614] env[62508]: value = "task-1776101" [ 1586.587614] env[62508]: _type = "Task" [ 1586.587614] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.606496] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776101, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.692387] env[62508]: DEBUG nova.objects.base [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1586.799540] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "refresh_cache-14c911d6-44c2-4c56-a027-3d25a1e58bcc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.799671] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquired lock "refresh_cache-14c911d6-44c2-4c56-a027-3d25a1e58bcc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.799868] env[62508]: DEBUG nova.network.neutron [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1586.915272] env[62508]: DEBUG nova.network.neutron [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Updating instance_info_cache with network_info: [{"id": "2fdf0128-ddf8-4030-a2be-bf738efcd699", "address": "fa:16:3e:70:a8:a5", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], 
"gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2fdf0128-dd", "ovs_interfaceid": "2fdf0128-ddf8-4030-a2be-bf738efcd699", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1586.978617] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776100, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.105927] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776101, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.111686] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97215117-1666-41da-b09d-1805424f66e3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.118703] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6941ce2b-38db-48c5-960f-9bdad8898fa8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.149810] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0825ad71-7863-4a6f-81ac-f3d6166af618 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.157855] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d0e57a9-90d9-40a6-83fc-fc9eac4f7445 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.174680] env[62508]: DEBUG nova.compute.provider_tree [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1587.352094] env[62508]: DEBUG nova.network.neutron [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1587.419926] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "refresh_cache-80a9e17e-4095-498c-80c8-200bfb4f3d1f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1587.422022] env[62508]: DEBUG nova.objects.instance [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lazy-loading 'migration_context' on Instance uuid 80a9e17e-4095-498c-80c8-200bfb4f3d1f {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1587.483634] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776100, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542877} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.483894] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a9b92a6d-154c-42bb-842c-bc42a07299a0/a9b92a6d-154c-42bb-842c-bc42a07299a0.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1587.484120] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1587.484378] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5558d648-2a05-480a-8b68-dd6354cb3535 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.491786] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1587.491786] env[62508]: value = "task-1776102" [ 1587.491786] env[62508]: _type = "Task" [ 1587.491786] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.501749] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776102, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.601659] env[62508]: DEBUG oslo_vmware.api [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776101, 'name': PowerOnVM_Task, 'duration_secs': 0.862549} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.601940] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1587.602543] env[62508]: INFO nova.compute.manager [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Took 8.55 seconds to spawn the instance on the hypervisor. [ 1587.602644] env[62508]: DEBUG nova.compute.manager [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1587.603416] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b2589b-5eae-47e3-9f13-654ca08b3a19 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.648179] env[62508]: DEBUG nova.network.neutron [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Updating instance_info_cache with network_info: [{"id": "e32abf3b-06cd-43e5-a59b-616b6d7d3ff2", "address": "fa:16:3e:cf:d2:46", "network": {"id": "ca54620c-2118-4248-ac67-90f8579e33aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-578420006-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15a9d6b8eb4e44a7a3d7fa4abe0cd5bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape32abf3b-06", "ovs_interfaceid": "e32abf3b-06cd-43e5-a59b-616b6d7d3ff2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1587.680019] env[62508]: DEBUG nova.scheduler.client.report [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] 
Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1587.925407] env[62508]: DEBUG nova.objects.base [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Object Instance<80a9e17e-4095-498c-80c8-200bfb4f3d1f> lazy-loaded attributes: info_cache,migration_context {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1587.926611] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-effcf21d-a9dc-4ec3-8e1d-c097a08f335f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.950170] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e7810a1-2ab5-4335-9aaf-55efd056b9d1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.957186] env[62508]: DEBUG oslo_vmware.api [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1587.957186] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5280d06a-cd34-ab66-cc7e-012d28070f80" [ 1587.957186] env[62508]: _type = "Task" [ 1587.957186] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.966995] env[62508]: DEBUG oslo_vmware.api [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5280d06a-cd34-ab66-cc7e-012d28070f80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.001992] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776102, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079309} completed successfully. 
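Editor's note: the scheduler report lines show the compute node's placement inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9: VCPU, MEMORY_MB and DISK_GB with totals, reservations and allocation ratios. Placement schedules against (total - reserved) * allocation_ratio; a quick check with the numbers from the log:

```python
# Effective schedulable capacity per resource class:
#   capacity = (total - reserved) * allocation_ratio
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```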
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.002612] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1588.003421] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2fa0f4a-d6bc-4483-94da-c83cd1c97a31 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.030787] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] a9b92a6d-154c-42bb-842c-bc42a07299a0/a9b92a6d-154c-42bb-842c-bc42a07299a0.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1588.031641] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b79fd3f1-f009-4aa2-9fd2-806b47c0dc62 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.055013] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1588.055013] env[62508]: value = "task-1776103" [ 1588.055013] env[62508]: _type = "Task" [ 1588.055013] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.065638] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776103, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.122920] env[62508]: INFO nova.compute.manager [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Took 21.67 seconds to build instance. [ 1588.149686] env[62508]: DEBUG nova.compute.manager [req-c8884be2-332f-41e6-bc2d-2c27cc552929 req-9e4ab1ca-e443-4284-bb22-3946415d1784 service nova] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Received event network-changed-e32abf3b-06cd-43e5-a59b-616b6d7d3ff2 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1588.149990] env[62508]: DEBUG nova.compute.manager [req-c8884be2-332f-41e6-bc2d-2c27cc552929 req-9e4ab1ca-e443-4284-bb22-3946415d1784 service nova] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Refreshing instance network info cache due to event network-changed-e32abf3b-06cd-43e5-a59b-616b6d7d3ff2. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1588.150206] env[62508]: DEBUG oslo_concurrency.lockutils [req-c8884be2-332f-41e6-bc2d-2c27cc552929 req-9e4ab1ca-e443-4284-bb22-3946415d1784 service nova] Acquiring lock "refresh_cache-14c911d6-44c2-4c56-a027-3d25a1e58bcc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1588.150707] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Releasing lock "refresh_cache-14c911d6-44c2-4c56-a027-3d25a1e58bcc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1588.151380] env[62508]: DEBUG nova.compute.manager [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Instance network_info: |[{"id": "e32abf3b-06cd-43e5-a59b-616b6d7d3ff2", "address": "fa:16:3e:cf:d2:46", "network": {"id": "ca54620c-2118-4248-ac67-90f8579e33aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-578420006-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15a9d6b8eb4e44a7a3d7fa4abe0cd5bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape32abf3b-06", "ovs_interfaceid": "e32abf3b-06cd-43e5-a59b-616b6d7d3ff2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1588.151804] env[62508]: DEBUG oslo_concurrency.lockutils [req-c8884be2-332f-41e6-bc2d-2c27cc552929 req-9e4ab1ca-e443-4284-bb22-3946415d1784 service nova] Acquired lock "refresh_cache-14c911d6-44c2-4c56-a027-3d25a1e58bcc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1588.152115] env[62508]: DEBUG nova.network.neutron [req-c8884be2-332f-41e6-bc2d-2c27cc552929 req-9e4ab1ca-e443-4284-bb22-3946415d1784 service nova] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Refreshing network info cache for port e32abf3b-06cd-43e5-a59b-616b6d7d3ff2 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1588.157541] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cf:d2:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9d6abf71-e893-4dec-9a05-0fe7d6c0624e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e32abf3b-06cd-43e5-a59b-616b6d7d3ff2', 'vif_model': 'vmxnet3'}] {{(pid=62508) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1588.165101] env[62508]: DEBUG oslo.service.loopingcall [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1588.167042] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1588.167490] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1928bcd8-aa1a-48b1-9b5d-a8ef8d3c8157 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.193594] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.506s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.196152] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.008s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.196512] env[62508]: DEBUG nova.objects.instance [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Lazy-loading 'resources' on Instance uuid 29223197-9a79-45cc-baa6-3deb731ec08e {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1588.203830] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1588.203830] env[62508]: value = "task-1776104" [ 1588.203830] env[62508]: _type = "Task" [ 1588.203830] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.215797] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776104, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.471866] env[62508]: DEBUG oslo_vmware.api [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5280d06a-cd34-ab66-cc7e-012d28070f80, 'name': SearchDatastore_Task, 'duration_secs': 0.035762} completed successfully. 
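Editor's note: the oslo.service.loopingcall line above ("Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return") is emitted by a retry wrapper around the VM-creation call that precedes Folder.CreateVM_Task. Below is a hedged sketch of that retry idiom using oslo.service's RetryDecorator; the exception class, limits, and body are placeholders and this is not the driver's actual decoration of create_vm.

```python
# Hedged sketch: re-invoke a callable with an increasing sleep when one of
# the listed exceptions is raised, the pattern behind the
# "Waiting for function ... to return." debug line.
from oslo_service import loopingcall

class TransientVimError(Exception):
    """Placeholder for a driver-specific retryable exception."""

@loopingcall.RetryDecorator(max_retry_count=10,
                            inc_sleep_time=1,
                            max_sleep_time=10,
                            exceptions=(TransientVimError,))
def create_vm():
    # stand-in for nova.virt.vmwareapi.vm_util.create_vm, which invokes
    # Folder.CreateVM_Task and waits for it (task-1776104 above)
    return 'vm-ref'

create_vm()
```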
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.472127] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.570380] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776103, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.625206] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ed7a2e37-ca0f-4795-a7e9-5c2de5d67476 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "145306d7-f0e8-46c0-b2ab-1c41c208f976" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.181s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.706265] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3ffc02db-4b0d-430a-bdda-a89bfbd4ad4c tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 27.139s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.713903] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 5.193s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.714732] env[62508]: INFO nova.compute.manager [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Unshelving [ 1588.724830] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776104, 'name': CreateVM_Task, 'duration_secs': 0.456129} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.725131] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1588.725816] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cce07762-d034-41a0-9778-c0b0dab30ae0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1588.725999] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cce07762-d034-41a0-9778-c0b0dab30ae0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1588.726385] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cce07762-d034-41a0-9778-c0b0dab30ae0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1588.726661] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15bbacbd-56a5-449e-91de-57017ad4a19a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.733856] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1588.733856] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5272237d-ac72-66eb-22c2-bea068c3d17d" [ 1588.733856] env[62508]: _type = "Task" [ 1588.733856] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.750033] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5272237d-ac72-66eb-22c2-bea068c3d17d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.070970] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776103, 'name': ReconfigVM_Task, 'duration_secs': 0.856767} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.071312] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Reconfigured VM instance instance-00000045 to attach disk [datastore1] a9b92a6d-154c-42bb-842c-bc42a07299a0/a9b92a6d-154c-42bb-842c-bc42a07299a0.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1589.072585] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0655d545-32f8-4a7c-8536-da4b93dcf0f1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.082018] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1589.082018] env[62508]: value = "task-1776105" [ 1589.082018] env[62508]: _type = "Task" [ 1589.082018] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.090743] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776105, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.127815] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191a9fdd-4e33-4c4e-ba6b-bc474fec1fa6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.135986] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8de8ed-d90f-44ac-9434-d27872d9ad6d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.172813] env[62508]: DEBUG nova.network.neutron [req-c8884be2-332f-41e6-bc2d-2c27cc552929 req-9e4ab1ca-e443-4284-bb22-3946415d1784 service nova] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Updated VIF entry in instance network info cache for port e32abf3b-06cd-43e5-a59b-616b6d7d3ff2. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1589.173556] env[62508]: DEBUG nova.network.neutron [req-c8884be2-332f-41e6-bc2d-2c27cc552929 req-9e4ab1ca-e443-4284-bb22-3946415d1784 service nova] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Updating instance_info_cache with network_info: [{"id": "e32abf3b-06cd-43e5-a59b-616b6d7d3ff2", "address": "fa:16:3e:cf:d2:46", "network": {"id": "ca54620c-2118-4248-ac67-90f8579e33aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-578420006-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15a9d6b8eb4e44a7a3d7fa4abe0cd5bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape32abf3b-06", "ovs_interfaceid": "e32abf3b-06cd-43e5-a59b-616b6d7d3ff2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1589.176122] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808822e5-1d17-47fe-aa90-569eb59ae7d5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.187890] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79352cff-1402-4f31-8d26-f43327028bd3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.204397] env[62508]: DEBUG nova.compute.provider_tree [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1589.245958] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cce07762-d034-41a0-9778-c0b0dab30ae0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1589.246562] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Processing image cce07762-d034-41a0-9778-c0b0dab30ae0 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1589.246562] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "[datastore1] 
devstack-image-cache_base/cce07762-d034-41a0-9778-c0b0dab30ae0/cce07762-d034-41a0-9778-c0b0dab30ae0.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1589.246672] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cce07762-d034-41a0-9778-c0b0dab30ae0/cce07762-d034-41a0-9778-c0b0dab30ae0.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1589.248706] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1589.248706] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-758c0f7b-4ae8-42d7-8eed-b8ccf7af5af0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.265619] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1589.266038] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1589.266900] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47384372-6048-475d-bdc3-463f6614c981 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.272851] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1589.272851] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52dca4a4-f7a6-2a80-b761-511796eac5a7" [ 1589.272851] env[62508]: _type = "Task" [ 1589.272851] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.283356] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52dca4a4-f7a6-2a80-b761-511796eac5a7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.429391] env[62508]: DEBUG nova.compute.manager [req-77e2326d-ffbf-4068-9cd3-ffd237ee75bd req-3a7b98a4-c387-4dee-a4db-1a0ccf556880 service nova] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Received event network-changed-ec2a5bd1-b682-40fe-825d-7029eb22f70e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1589.430030] env[62508]: DEBUG nova.compute.manager [req-77e2326d-ffbf-4068-9cd3-ffd237ee75bd req-3a7b98a4-c387-4dee-a4db-1a0ccf556880 service nova] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Refreshing instance network info cache due to event network-changed-ec2a5bd1-b682-40fe-825d-7029eb22f70e. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1589.430030] env[62508]: DEBUG oslo_concurrency.lockutils [req-77e2326d-ffbf-4068-9cd3-ffd237ee75bd req-3a7b98a4-c387-4dee-a4db-1a0ccf556880 service nova] Acquiring lock "refresh_cache-145306d7-f0e8-46c0-b2ab-1c41c208f976" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1589.430030] env[62508]: DEBUG oslo_concurrency.lockutils [req-77e2326d-ffbf-4068-9cd3-ffd237ee75bd req-3a7b98a4-c387-4dee-a4db-1a0ccf556880 service nova] Acquired lock "refresh_cache-145306d7-f0e8-46c0-b2ab-1c41c208f976" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1589.430501] env[62508]: DEBUG nova.network.neutron [req-77e2326d-ffbf-4068-9cd3-ffd237ee75bd req-3a7b98a4-c387-4dee-a4db-1a0ccf556880 service nova] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Refreshing network info cache for port ec2a5bd1-b682-40fe-825d-7029eb22f70e {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1589.596024] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776105, 'name': Rename_Task, 'duration_secs': 0.232043} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.597020] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1589.597020] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-16341374-7fbf-495a-99eb-f1640d275953 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.603765] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1589.603765] env[62508]: value = "task-1776106" [ 1589.603765] env[62508]: _type = "Task" [ 1589.603765] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.613355] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776106, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.676711] env[62508]: DEBUG oslo_concurrency.lockutils [req-c8884be2-332f-41e6-bc2d-2c27cc552929 req-9e4ab1ca-e443-4284-bb22-3946415d1784 service nova] Releasing lock "refresh_cache-14c911d6-44c2-4c56-a027-3d25a1e58bcc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1589.708424] env[62508]: DEBUG nova.scheduler.client.report [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1589.747207] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1589.784278] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Preparing fetch location {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1589.784545] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Fetch image to [datastore1] OSTACK_IMG_5e0eecd6-2fa6-4e68-a73c-27c1d6d5adf5/OSTACK_IMG_5e0eecd6-2fa6-4e68-a73c-27c1d6d5adf5.vmdk {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1589.784735] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Downloading stream optimized image cce07762-d034-41a0-9778-c0b0dab30ae0 to [datastore1] OSTACK_IMG_5e0eecd6-2fa6-4e68-a73c-27c1d6d5adf5/OSTACK_IMG_5e0eecd6-2fa6-4e68-a73c-27c1d6d5adf5.vmdk on the data store datastore1 as vApp {{(pid=62508) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1589.785021] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 
14c911d6-44c2-4c56-a027-3d25a1e58bcc] Downloading image file data cce07762-d034-41a0-9778-c0b0dab30ae0 to the ESX as VM named 'OSTACK_IMG_5e0eecd6-2fa6-4e68-a73c-27c1d6d5adf5' {{(pid=62508) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1589.869191] env[62508]: DEBUG oslo_vmware.rw_handles [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1589.869191] env[62508]: value = "resgroup-9" [ 1589.869191] env[62508]: _type = "ResourcePool" [ 1589.869191] env[62508]: }. {{(pid=62508) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1589.869191] env[62508]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-2517156d-cf8f-4212-b5dd-bd1e40a3d4ed {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.894721] env[62508]: DEBUG oslo_vmware.rw_handles [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lease: (returnval){ [ 1589.894721] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5209f0b5-3f64-87b5-b230-53ecad57bf9f" [ 1589.894721] env[62508]: _type = "HttpNfcLease" [ 1589.894721] env[62508]: } obtained for vApp import into resource pool (val){ [ 1589.894721] env[62508]: value = "resgroup-9" [ 1589.894721] env[62508]: _type = "ResourcePool" [ 1589.894721] env[62508]: }. {{(pid=62508) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1589.895070] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the lease: (returnval){ [ 1589.895070] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5209f0b5-3f64-87b5-b230-53ecad57bf9f" [ 1589.895070] env[62508]: _type = "HttpNfcLease" [ 1589.895070] env[62508]: } to be ready. {{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1589.902189] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1589.902189] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5209f0b5-3f64-87b5-b230-53ecad57bf9f" [ 1589.902189] env[62508]: _type = "HttpNfcLease" [ 1589.902189] env[62508]: } is initializing. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1590.122209] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776106, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.214247] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.018s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.216829] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.745s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.252326] env[62508]: INFO nova.scheduler.client.report [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Deleted allocations for instance 29223197-9a79-45cc-baa6-3deb731ec08e [ 1590.296197] env[62508]: DEBUG nova.network.neutron [req-77e2326d-ffbf-4068-9cd3-ffd237ee75bd req-3a7b98a4-c387-4dee-a4db-1a0ccf556880 service nova] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Updated VIF entry in instance network info cache for port ec2a5bd1-b682-40fe-825d-7029eb22f70e. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1590.296651] env[62508]: DEBUG nova.network.neutron [req-77e2326d-ffbf-4068-9cd3-ffd237ee75bd req-3a7b98a4-c387-4dee-a4db-1a0ccf556880 service nova] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Updating instance_info_cache with network_info: [{"id": "ec2a5bd1-b682-40fe-825d-7029eb22f70e", "address": "fa:16:3e:8e:42:7e", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec2a5bd1-b6", "ovs_interfaceid": "ec2a5bd1-b682-40fe-825d-7029eb22f70e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1590.404324] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1590.404324] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5209f0b5-3f64-87b5-b230-53ecad57bf9f" [ 1590.404324] env[62508]: _type = "HttpNfcLease" [ 1590.404324] env[62508]: } is initializing. 
{{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1590.427164] env[62508]: DEBUG oslo_concurrency.lockutils [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquiring lock "a72fd09e-a3be-486a-a03b-8c25b04d82d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.427692] env[62508]: DEBUG oslo_concurrency.lockutils [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "a72fd09e-a3be-486a-a03b-8c25b04d82d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.616365] env[62508]: DEBUG oslo_vmware.api [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776106, 'name': PowerOnVM_Task, 'duration_secs': 0.587058} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.616673] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1590.616926] env[62508]: INFO nova.compute.manager [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Took 9.14 seconds to spawn the instance on the hypervisor. 
[ 1590.617167] env[62508]: DEBUG nova.compute.manager [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1590.617983] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6eabf0-88f6-4b5e-8315-e8d7ae55202a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.626841] env[62508]: DEBUG oslo_concurrency.lockutils [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquiring lock "4cc6d0f4-413a-44e1-850f-da499f582d15" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.627146] env[62508]: DEBUG oslo_concurrency.lockutils [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "4cc6d0f4-413a-44e1-850f-da499f582d15" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.769487] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c2a5459a-73f4-41f6-a2ea-dd09bbc21884 tempest-AttachInterfacesV270Test-822379910 tempest-AttachInterfacesV270Test-822379910-project-member] Lock "29223197-9a79-45cc-baa6-3deb731ec08e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.952s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.803322] env[62508]: DEBUG oslo_concurrency.lockutils [req-77e2326d-ffbf-4068-9cd3-ffd237ee75bd req-3a7b98a4-c387-4dee-a4db-1a0ccf556880 service nova] Releasing lock "refresh_cache-145306d7-f0e8-46c0-b2ab-1c41c208f976" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1590.906548] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1590.906548] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5209f0b5-3f64-87b5-b230-53ecad57bf9f" [ 1590.906548] env[62508]: _type = "HttpNfcLease" [ 1590.906548] env[62508]: } is ready. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1590.906783] env[62508]: DEBUG oslo_vmware.rw_handles [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1590.906783] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5209f0b5-3f64-87b5-b230-53ecad57bf9f" [ 1590.906783] env[62508]: _type = "HttpNfcLease" [ 1590.906783] env[62508]: }. 
{{(pid=62508) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1590.907592] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef56230-b26a-4885-b401-1065dd3ffee9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.920494] env[62508]: DEBUG oslo_vmware.rw_handles [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523a713f-99b7-7f74-f32f-eff2238945be/disk-0.vmdk from lease info. {{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1590.920878] env[62508]: DEBUG oslo_vmware.rw_handles [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523a713f-99b7-7f74-f32f-eff2238945be/disk-0.vmdk. {{(pid=62508) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1590.987803] env[62508]: DEBUG nova.compute.manager [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1591.002524] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "bee2cc61-b26c-4d2d-a2aa-ec79b8678e32" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.002524] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "bee2cc61-b26c-4d2d-a2aa-ec79b8678e32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.003645] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3cad57d7-bb96-42e4-87cf-458425be7a80 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.129609] env[62508]: DEBUG nova.compute.manager [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1591.142090] env[62508]: INFO nova.compute.manager [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Took 22.20 seconds to build instance. 
[ 1591.221326] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2caecd49-451c-4848-81ec-f61ca2a6cf2f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.230798] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50f03125-7d89-4dd5-8ee7-2f0e987c4e11 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.263965] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cece712e-8c9a-4acb-a35a-14d1893d55f7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.272507] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-911ade4d-5f28-4b5e-9776-6384dedbc3f1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.290166] env[62508]: DEBUG nova.compute.provider_tree [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1591.439194] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "f465712f-f65a-4521-90ab-e9f5c5b6de5f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.439439] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "f465712f-f65a-4521-90ab-e9f5c5b6de5f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.507473] env[62508]: DEBUG nova.compute.manager [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1591.514546] env[62508]: DEBUG oslo_concurrency.lockutils [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.644985] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6eeaa912-f830-489a-8015-1b5e5df417c5 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "a9b92a6d-154c-42bb-842c-bc42a07299a0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.708s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.655308] env[62508]: DEBUG oslo_concurrency.lockutils [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.796976] env[62508]: DEBUG nova.scheduler.client.report [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1591.943529] env[62508]: DEBUG nova.compute.manager [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1591.971261] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "a9b92a6d-154c-42bb-842c-bc42a07299a0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.972027] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "a9b92a6d-154c-42bb-842c-bc42a07299a0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.972027] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "a9b92a6d-154c-42bb-842c-bc42a07299a0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.972187] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "a9b92a6d-154c-42bb-842c-bc42a07299a0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.972467] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "a9b92a6d-154c-42bb-842c-bc42a07299a0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.975591] env[62508]: INFO nova.compute.manager [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Terminating instance [ 1591.981186] env[62508]: DEBUG nova.compute.manager [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1591.981186] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1591.981186] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72eb862e-e9cf-416b-9c42-a701cc7e7651 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.990418] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1591.990669] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b13fca0-6530-40d2-b23a-fb653dfc19c4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.001677] env[62508]: DEBUG oslo_vmware.api [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1592.001677] env[62508]: value = "task-1776108" [ 1592.001677] env[62508]: _type = "Task" [ 1592.001677] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.012896] env[62508]: DEBUG oslo_vmware.api [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776108, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.038284] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1592.112891] env[62508]: DEBUG oslo_vmware.rw_handles [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Completed reading data from the image iterator. {{(pid=62508) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1592.113170] env[62508]: DEBUG oslo_vmware.rw_handles [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523a713f-99b7-7f74-f32f-eff2238945be/disk-0.vmdk. 
{{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1592.114071] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e1960bf-ce87-4844-a074-972bc5667415 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.120941] env[62508]: DEBUG oslo_vmware.rw_handles [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523a713f-99b7-7f74-f32f-eff2238945be/disk-0.vmdk is in state: ready. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1592.121102] env[62508]: DEBUG oslo_vmware.rw_handles [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523a713f-99b7-7f74-f32f-eff2238945be/disk-0.vmdk. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1592.121366] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-edd5eb72-0eee-4841-b639-dff4f2ebbfa1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.475477] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1592.505430] env[62508]: DEBUG oslo_vmware.rw_handles [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523a713f-99b7-7f74-f32f-eff2238945be/disk-0.vmdk. {{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1592.505430] env[62508]: INFO nova.virt.vmwareapi.images [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Downloaded image file data cce07762-d034-41a0-9778-c0b0dab30ae0 [ 1592.508611] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31ba1cbb-c936-429c-8162-77bbb1099391 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.517997] env[62508]: DEBUG oslo_vmware.api [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776108, 'name': PowerOffVM_Task, 'duration_secs': 0.260489} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.528982] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1592.528982] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1592.529322] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-536c92a3-a0e3-46e6-9fa8-9d2b2b4c9ef1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.530961] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f355235a-a634-45fb-afbe-c89798ba14eb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.637237] env[62508]: INFO nova.virt.vmwareapi.images [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] The imported VM was unregistered [ 1592.639691] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Caching image {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1592.639865] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Creating directory with path [datastore1] devstack-image-cache_base/cce07762-d034-41a0-9778-c0b0dab30ae0 {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1592.640312] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79f03093-a867-41a1-98d1-b92c45d5447d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.662624] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Created directory with path [datastore1] devstack-image-cache_base/cce07762-d034-41a0-9778-c0b0dab30ae0 {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1592.662942] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_5e0eecd6-2fa6-4e68-a73c-27c1d6d5adf5/OSTACK_IMG_5e0eecd6-2fa6-4e68-a73c-27c1d6d5adf5.vmdk to [datastore1] devstack-image-cache_base/cce07762-d034-41a0-9778-c0b0dab30ae0/cce07762-d034-41a0-9778-c0b0dab30ae0.vmdk. 
{{(pid=62508) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1592.663032] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-6a4f09e2-53ae-4f96-aeb6-6bc566d890b5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.670230] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1592.670230] env[62508]: value = "task-1776111" [ 1592.670230] env[62508]: _type = "Task" [ 1592.670230] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.680175] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776111, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.686885] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1592.687097] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1592.687283] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Deleting the datastore file [datastore1] a9b92a6d-154c-42bb-842c-bc42a07299a0 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1592.687541] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-327b49ee-22f9-4cda-affe-734ee1f4fb89 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.694228] env[62508]: DEBUG oslo_vmware.api [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for the task: (returnval){ [ 1592.694228] env[62508]: value = "task-1776112" [ 1592.694228] env[62508]: _type = "Task" [ 1592.694228] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.702908] env[62508]: DEBUG oslo_vmware.api [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776112, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.808035] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.591s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1592.810701] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.064s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1592.811741] env[62508]: DEBUG nova.objects.instance [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lazy-loading 'pci_requests' on Instance uuid aedbd388-3ef7-410f-b0e3-5ea67ad56b65 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1593.180351] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776111, 'name': MoveVirtualDisk_Task} progress is 12%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.204338] env[62508]: DEBUG oslo_vmware.api [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Task: {'id': task-1776112, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.329144} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.204635] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1593.204839] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1593.205151] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1593.205389] env[62508]: INFO nova.compute.manager [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Took 1.23 seconds to destroy the instance on the hypervisor. 
[ 1593.205702] env[62508]: DEBUG oslo.service.loopingcall [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1593.205962] env[62508]: DEBUG nova.compute.manager [-] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1593.206094] env[62508]: DEBUG nova.network.neutron [-] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1593.317176] env[62508]: DEBUG nova.objects.instance [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lazy-loading 'numa_topology' on Instance uuid aedbd388-3ef7-410f-b0e3-5ea67ad56b65 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1593.379121] env[62508]: INFO nova.scheduler.client.report [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleted allocation for migration b20f235e-fe4d-4b55-9bdd-2acccc8f2871 [ 1593.624610] env[62508]: DEBUG nova.compute.manager [req-1cb7ee2b-40b3-46a9-ab19-27534eec85ac req-3d82db3f-1458-4c71-9bdf-aa969060fefd service nova] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Received event network-vif-deleted-a74e9898-5ccf-4408-a728-3868a07bff74 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1593.624824] env[62508]: INFO nova.compute.manager [req-1cb7ee2b-40b3-46a9-ab19-27534eec85ac req-3d82db3f-1458-4c71-9bdf-aa969060fefd service nova] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Neutron deleted interface a74e9898-5ccf-4408-a728-3868a07bff74; detaching it from the instance and deleting it from the info cache [ 1593.625207] env[62508]: DEBUG nova.network.neutron [req-1cb7ee2b-40b3-46a9-ab19-27534eec85ac req-3d82db3f-1458-4c71-9bdf-aa969060fefd service nova] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1593.680962] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776111, 'name': MoveVirtualDisk_Task} progress is 32%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.822143] env[62508]: INFO nova.compute.claims [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1593.886072] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f5424b0a-5cb3-4066-8514-27350b238822 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "80a9e17e-4095-498c-80c8-200bfb4f3d1f" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.985s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.101522] env[62508]: DEBUG nova.network.neutron [-] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1594.127950] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d16325d6-09e5-4d44-8a9c-1359a72004f5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.142167] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad0e21b-5a52-4b36-b775-ddbeed050338 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.184210] env[62508]: DEBUG nova.compute.manager [req-1cb7ee2b-40b3-46a9-ab19-27534eec85ac req-3d82db3f-1458-4c71-9bdf-aa969060fefd service nova] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Detach interface failed, port_id=a74e9898-5ccf-4408-a728-3868a07bff74, reason: Instance a9b92a6d-154c-42bb-842c-bc42a07299a0 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1594.194642] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776111, 'name': MoveVirtualDisk_Task} progress is 52%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.604354] env[62508]: INFO nova.compute.manager [-] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Took 1.40 seconds to deallocate network for instance. [ 1594.695706] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776111, 'name': MoveVirtualDisk_Task} progress is 71%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.114011] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.198475] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776111, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.260102] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a48576-4059-4fc5-8451-f5f260f734a9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.271204] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff95cdcc-6e22-4310-b4a6-746701eb44c6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.309400] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c42ddfa-4139-4521-adb0-1c03248f7c6a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.320628] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d741da0-61b5-47a0-ac8d-d36915fe2ca2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.342728] env[62508]: DEBUG nova.compute.provider_tree [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1595.696500] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776111, 'name': MoveVirtualDisk_Task} progress is 88%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.846929] env[62508]: DEBUG nova.scheduler.client.report [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1595.896030] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "80a9e17e-4095-498c-80c8-200bfb4f3d1f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.896275] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "80a9e17e-4095-498c-80c8-200bfb4f3d1f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1595.896491] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "80a9e17e-4095-498c-80c8-200bfb4f3d1f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.896672] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "80a9e17e-4095-498c-80c8-200bfb4f3d1f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1595.896838] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "80a9e17e-4095-498c-80c8-200bfb4f3d1f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1595.899238] env[62508]: INFO nova.compute.manager [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Terminating instance [ 1595.901068] env[62508]: DEBUG nova.compute.manager [None 
req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1595.901270] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1595.902119] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b7adee1-5a27-4337-b8b3-d6d8de0c9d71 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.910319] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1595.910583] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-39fda02c-bd5b-44cc-a656-a6f51e638688 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.916952] env[62508]: DEBUG oslo_vmware.api [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1595.916952] env[62508]: value = "task-1776113" [ 1595.916952] env[62508]: _type = "Task" [ 1595.916952] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.925658] env[62508]: DEBUG oslo_vmware.api [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776113, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.197599] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776111, 'name': MoveVirtualDisk_Task} progress is 91%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.330341] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "2e32ca83-8506-4588-bd33-4eadb7d2d30a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.330570] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "2e32ca83-8506-4588-bd33-4eadb7d2d30a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.331776] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Acquiring lock "a617fe8b-c70e-4988-a6ce-437ccc5261c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.331981] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Lock "a617fe8b-c70e-4988-a6ce-437ccc5261c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.352155] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.541s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.354529] env[62508]: DEBUG oslo_concurrency.lockutils [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.840s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.357707] env[62508]: INFO nova.compute.claims [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1596.405823] env[62508]: INFO nova.network.neutron [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Updating port bd3f776a-1eed-4e8d-b7f3-d958db372a2f with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 
1596.430463] env[62508]: DEBUG oslo_vmware.api [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776113, 'name': PowerOffVM_Task, 'duration_secs': 0.227883} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.430463] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1596.430463] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1596.430463] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3746e6ce-4491-49e1-9255-b6ade5dfbba0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.556329] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1596.556575] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1596.556764] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleting the datastore file [datastore1] 80a9e17e-4095-498c-80c8-200bfb4f3d1f {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1596.557048] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4d2d51df-6422-4b7f-a3ca-a03091f1aa5c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.563939] env[62508]: DEBUG oslo_vmware.api [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1596.563939] env[62508]: value = "task-1776115" [ 1596.563939] env[62508]: _type = "Task" [ 1596.563939] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.572884] env[62508]: DEBUG oslo_vmware.api [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776115, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.697635] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776111, 'name': MoveVirtualDisk_Task} progress is 91%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.834777] env[62508]: DEBUG nova.compute.manager [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1596.841158] env[62508]: DEBUG nova.compute.manager [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1597.074233] env[62508]: DEBUG oslo_vmware.api [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776115, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.199041] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776111, 'name': MoveVirtualDisk_Task, 'duration_secs': 4.454404} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.199347] env[62508]: INFO nova.virt.vmwareapi.ds_util [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_5e0eecd6-2fa6-4e68-a73c-27c1d6d5adf5/OSTACK_IMG_5e0eecd6-2fa6-4e68-a73c-27c1d6d5adf5.vmdk to [datastore1] devstack-image-cache_base/cce07762-d034-41a0-9778-c0b0dab30ae0/cce07762-d034-41a0-9778-c0b0dab30ae0.vmdk. 
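The task-1776111 (MoveVirtualDisk_Task) and task-1776115/1776116 (DeleteDatastoreFile_Task) entries above follow oslo.vmware's invoke-then-poll pattern: a vCenter task is started through the API session and then polled until it completes. The sketch below illustrates that pattern; the session endpoint, credentials, datastore path and datacenter reference are placeholders (assumptions), not values taken from this log.

from oslo_vmware import api as vmware_api

def delete_datastore_file(session, ds_path, dc_ref):
    # Start FileManager.DeleteDatastoreFile_Task and block until it finishes.
    # session: an oslo_vmware.api.VMwareAPISession
    # ds_path: full datastore path, e.g. '[datastore1] OSTACK_IMG_example' (placeholder)
    # dc_ref:  managed object reference of the owning datacenter
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    # wait_for_task polls the task every task_poll_interval seconds (the
    # "progress is N%" lines above come from this polling loop) and raises
    # if the task finishes in an error state.
    return session.wait_for_task(task)

# Hypothetical session; host and credentials are assumptions, not from this log.
session = vmware_api.VMwareAPISession(
    host='vcenter.example.org',
    server_username='administrator',
    server_password='secret',
    api_retry_count=10,
    task_poll_interval=0.5)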
[ 1597.199563] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Cleaning up location [datastore1] OSTACK_IMG_5e0eecd6-2fa6-4e68-a73c-27c1d6d5adf5 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1597.199741] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_5e0eecd6-2fa6-4e68-a73c-27c1d6d5adf5 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1597.200042] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-978df8ce-937c-4505-b789-d3e39839228c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.206941] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1597.206941] env[62508]: value = "task-1776116" [ 1597.206941] env[62508]: _type = "Task" [ 1597.206941] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.216508] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776116, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.362472] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.363484] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.574592] env[62508]: DEBUG oslo_vmware.api [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776115, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.680194} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.577008] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1597.577218] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1597.577447] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1597.577682] env[62508]: INFO nova.compute.manager [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1597.577901] env[62508]: DEBUG oslo.service.loopingcall [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1597.578286] env[62508]: DEBUG nova.compute.manager [-] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1597.578388] env[62508]: DEBUG nova.network.neutron [-] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1597.687980] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9568ce11-1374-4a9d-80e6-4fefe4ce79c2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.695262] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74294851-84e8-4b45-a246-df05bcfd16dc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.730517] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be155867-1d42-4768-9f58-e5b2f15f2435 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.741276] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea2c8e54-c375-4630-8074-34413f205721 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.745403] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776116, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.042155} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.745654] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1597.745832] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cce07762-d034-41a0-9778-c0b0dab30ae0/cce07762-d034-41a0-9778-c0b0dab30ae0.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1597.746134] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cce07762-d034-41a0-9778-c0b0dab30ae0/cce07762-d034-41a0-9778-c0b0dab30ae0.vmdk to [datastore1] 14c911d6-44c2-4c56-a027-3d25a1e58bcc/14c911d6-44c2-4c56-a027-3d25a1e58bcc.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1597.746728] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3689aee7-e819-4989-a9e5-4792420f0b5a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.756511] env[62508]: DEBUG nova.compute.provider_tree [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1597.763811] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1597.763811] env[62508]: value = "task-1776117" [ 1597.763811] env[62508]: _type = "Task" [ 1597.763811] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.771637] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776117, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.106692] env[62508]: DEBUG nova.compute.manager [req-4c18a75e-aa83-4287-9619-91b14708fe3e req-99d04fa9-202a-44c8-80c1-57cc5b8a36c8 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Received event network-vif-plugged-bd3f776a-1eed-4e8d-b7f3-d958db372a2f {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1598.106920] env[62508]: DEBUG oslo_concurrency.lockutils [req-4c18a75e-aa83-4287-9619-91b14708fe3e req-99d04fa9-202a-44c8-80c1-57cc5b8a36c8 service nova] Acquiring lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.107148] env[62508]: DEBUG oslo_concurrency.lockutils [req-4c18a75e-aa83-4287-9619-91b14708fe3e req-99d04fa9-202a-44c8-80c1-57cc5b8a36c8 service nova] Lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.107355] env[62508]: DEBUG oslo_concurrency.lockutils [req-4c18a75e-aa83-4287-9619-91b14708fe3e req-99d04fa9-202a-44c8-80c1-57cc5b8a36c8 service nova] Lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.107520] env[62508]: DEBUG nova.compute.manager [req-4c18a75e-aa83-4287-9619-91b14708fe3e req-99d04fa9-202a-44c8-80c1-57cc5b8a36c8 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] No waiting events found dispatching network-vif-plugged-bd3f776a-1eed-4e8d-b7f3-d958db372a2f {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1598.107691] env[62508]: WARNING nova.compute.manager [req-4c18a75e-aa83-4287-9619-91b14708fe3e req-99d04fa9-202a-44c8-80c1-57cc5b8a36c8 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Received unexpected event network-vif-plugged-bd3f776a-1eed-4e8d-b7f3-d958db372a2f for instance with vm_state shelved_offloaded and task_state spawning. 
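The repeated "Acquiring lock ... / Lock ... acquired / Lock ... released" lines (for example around the per-instance "aedbd388-...-events" lock above) are emitted by oslo.concurrency's lockutils wrapper. Below is a small illustrative sketch of the two usual forms; the lock names and empty bodies are placeholders, not Nova code.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources():
    # Runs with the in-process 'compute_resources' lock held; the decorator's
    # wrapper logs the "acquired ... waited Ns" / "released ... held Ns" messages
    # seen throughout this log.
    pass

def clear_events_for_instance(instance_uuid):
    # Context-manager form, matching the per-instance '<uuid>-events' locks above.
    with lockutils.lock('%s-events' % instance_uuid):
        pass  # placeholder for the critical section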
[ 1598.152282] env[62508]: DEBUG nova.compute.manager [req-15cc44b9-672e-4f26-90d7-d973c8abfe98 req-3c144701-b726-4638-8905-ac9c05a0bdc6 service nova] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Received event network-vif-deleted-2fdf0128-ddf8-4030-a2be-bf738efcd699 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1598.152526] env[62508]: INFO nova.compute.manager [req-15cc44b9-672e-4f26-90d7-d973c8abfe98 req-3c144701-b726-4638-8905-ac9c05a0bdc6 service nova] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Neutron deleted interface 2fdf0128-ddf8-4030-a2be-bf738efcd699; detaching it from the instance and deleting it from the info cache [ 1598.152743] env[62508]: DEBUG nova.network.neutron [req-15cc44b9-672e-4f26-90d7-d973c8abfe98 req-3c144701-b726-4638-8905-ac9c05a0bdc6 service nova] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1598.197594] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1598.198083] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquired lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1598.198083] env[62508]: DEBUG nova.network.neutron [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1598.260159] env[62508]: DEBUG nova.scheduler.client.report [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1598.275658] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776117, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.630484] env[62508]: DEBUG nova.network.neutron [-] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1598.655530] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9dd59d06-4ae6-4fb5-87ac-e7f8dc33e429 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.668330] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c49bd810-8285-4353-b7f8-b659f57ac0ae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.712149] env[62508]: DEBUG nova.compute.manager [req-15cc44b9-672e-4f26-90d7-d973c8abfe98 req-3c144701-b726-4638-8905-ac9c05a0bdc6 service nova] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Detach interface failed, port_id=2fdf0128-ddf8-4030-a2be-bf738efcd699, reason: Instance 80a9e17e-4095-498c-80c8-200bfb4f3d1f could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1598.768310] env[62508]: DEBUG oslo_concurrency.lockutils [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.414s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.768780] env[62508]: DEBUG nova.compute.manager [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1598.771518] env[62508]: DEBUG oslo_concurrency.lockutils [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.116s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.773467] env[62508]: INFO nova.compute.claims [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1598.785672] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776117, 'name': CopyVirtualDisk_Task} progress is 40%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.928316] env[62508]: DEBUG nova.network.neutron [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Updating instance_info_cache with network_info: [{"id": "bd3f776a-1eed-4e8d-b7f3-d958db372a2f", "address": "fa:16:3e:c0:6a:f0", "network": {"id": "c8fc62e6-749b-4f96-8d05-8664390ef76f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1342046586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9b1180071bc4cc2a419daac2f51e3f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d829efb7-e98e-4b67-bd03-b0888287dbfd", "external-id": "nsx-vlan-transportzone-128", "segmentation_id": 128, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd3f776a-1e", "ovs_interfaceid": "bd3f776a-1eed-4e8d-b7f3-d958db372a2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1599.133618] env[62508]: INFO nova.compute.manager [-] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Took 1.55 seconds to deallocate network for instance. [ 1599.275558] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776117, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.280834] env[62508]: DEBUG nova.compute.utils [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1599.282804] env[62508]: DEBUG nova.compute.manager [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Not allocating networking since 'none' was specified. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1599.431818] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Releasing lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1599.462665] env[62508]: DEBUG nova.virt.hardware [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='0ad59c9a97466e59314028929b60ecd1',container_format='bare',created_at=2024-12-11T22:13:39Z,direct_url=,disk_format='vmdk',id=d17b0134-9e41-4671-a49a-a34df9610594,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-764568000-shelved',owner='b9b1180071bc4cc2a419daac2f51e3f1',properties=ImageMetaProps,protected=,size=31591936,status='active',tags=,updated_at=2024-12-11T22:13:55Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1599.462822] env[62508]: DEBUG nova.virt.hardware [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1599.463126] env[62508]: DEBUG nova.virt.hardware [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1599.463265] env[62508]: DEBUG nova.virt.hardware [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1599.463432] env[62508]: DEBUG nova.virt.hardware [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1599.463644] env[62508]: DEBUG nova.virt.hardware [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1599.463872] env[62508]: DEBUG nova.virt.hardware [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1599.464050] env[62508]: DEBUG nova.virt.hardware [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1599.464227] env[62508]: DEBUG nova.virt.hardware [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1599.464396] env[62508]: DEBUG nova.virt.hardware [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1599.464627] env[62508]: DEBUG nova.virt.hardware [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1599.465617] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc56e112-49b5-465c-b70c-66d9518a49d5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.477391] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c48856b-f612-4800-8ec7-fa61719ba332 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.494979] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:6a:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd829efb7-e98e-4b67-bd03-b0888287dbfd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd3f776a-1eed-4e8d-b7f3-d958db372a2f', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1599.503137] env[62508]: DEBUG oslo.service.loopingcall [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1599.503456] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1599.503712] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5f43aa40-d353-451b-93dd-b58fa89551d3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.524443] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1599.524443] env[62508]: value = "task-1776118" [ 1599.524443] env[62508]: _type = "Task" [ 1599.524443] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.535190] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776118, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.641503] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1599.778749] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776117, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.787048] env[62508]: DEBUG nova.compute.manager [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1600.038484] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776118, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.136587] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aabbfb97-9708-4792-aa47-40270920290a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.141486] env[62508]: DEBUG nova.compute.manager [req-970749c8-5556-4802-a379-f29a7a0321dc req-0d82a6c3-0fae-4517-86eb-9365beca8265 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Received event network-changed-bd3f776a-1eed-4e8d-b7f3-d958db372a2f {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1600.141682] env[62508]: DEBUG nova.compute.manager [req-970749c8-5556-4802-a379-f29a7a0321dc req-0d82a6c3-0fae-4517-86eb-9365beca8265 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Refreshing instance network info cache due to event network-changed-bd3f776a-1eed-4e8d-b7f3-d958db372a2f. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1600.141946] env[62508]: DEBUG oslo_concurrency.lockutils [req-970749c8-5556-4802-a379-f29a7a0321dc req-0d82a6c3-0fae-4517-86eb-9365beca8265 service nova] Acquiring lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1600.142123] env[62508]: DEBUG oslo_concurrency.lockutils [req-970749c8-5556-4802-a379-f29a7a0321dc req-0d82a6c3-0fae-4517-86eb-9365beca8265 service nova] Acquired lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1600.142292] env[62508]: DEBUG nova.network.neutron [req-970749c8-5556-4802-a379-f29a7a0321dc req-0d82a6c3-0fae-4517-86eb-9365beca8265 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Refreshing network info cache for port bd3f776a-1eed-4e8d-b7f3-d958db372a2f {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1600.150603] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f44c17ca-86b1-49af-980e-556de7d6a0a8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.190478] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34333d25-414d-4949-8628-007769c76a40 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.201252] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eaf02e3-544b-405f-82f6-5879dc9e76e1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.218360] env[62508]: DEBUG nova.compute.provider_tree [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1600.275129] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776117, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.495025} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.275439] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cce07762-d034-41a0-9778-c0b0dab30ae0/cce07762-d034-41a0-9778-c0b0dab30ae0.vmdk to [datastore1] 14c911d6-44c2-4c56-a027-3d25a1e58bcc/14c911d6-44c2-4c56-a027-3d25a1e58bcc.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1600.276310] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f25d2f7-0f74-4875-8f72-952bae17a9a6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.304396] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] 14c911d6-44c2-4c56-a027-3d25a1e58bcc/14c911d6-44c2-4c56-a027-3d25a1e58bcc.vmdk or device None with type streamOptimized {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1600.304624] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b50a5dff-ad86-4d46-84ea-579d6a07685d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.325615] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1600.325615] env[62508]: value = "task-1776119" [ 1600.325615] env[62508]: _type = "Task" [ 1600.325615] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.335713] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776119, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.536580] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776118, 'name': CreateVM_Task, 'duration_secs': 0.951404} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.536747] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1600.537432] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d17b0134-9e41-4671-a49a-a34df9610594" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1600.537601] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d17b0134-9e41-4671-a49a-a34df9610594" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1600.537979] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d17b0134-9e41-4671-a49a-a34df9610594" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1600.538263] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7663375b-e38f-4d9e-9e99-38718e43b834 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.542411] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1600.542411] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528676c4-57da-c456-3c22-9b82f49377f5" [ 1600.542411] env[62508]: _type = "Task" [ 1600.542411] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.549678] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528676c4-57da-c456-3c22-9b82f49377f5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.721146] env[62508]: DEBUG nova.scheduler.client.report [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1600.805745] env[62508]: DEBUG nova.compute.manager [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1600.831761] env[62508]: DEBUG nova.virt.hardware [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1600.831999] env[62508]: DEBUG nova.virt.hardware [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1600.832170] env[62508]: DEBUG nova.virt.hardware [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1600.832355] env[62508]: DEBUG nova.virt.hardware [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1600.832501] env[62508]: DEBUG nova.virt.hardware [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1600.832648] env[62508]: DEBUG nova.virt.hardware [None 
req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1600.832851] env[62508]: DEBUG nova.virt.hardware [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1600.833042] env[62508]: DEBUG nova.virt.hardware [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1600.833209] env[62508]: DEBUG nova.virt.hardware [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1600.833378] env[62508]: DEBUG nova.virt.hardware [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1600.833553] env[62508]: DEBUG nova.virt.hardware [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1600.834373] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf06876f-de2d-4a38-8995-c51709060c70 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.844530] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c2cc2c-5f25-4414-a1a4-139c03ff63a3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.848390] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776119, 'name': ReconfigVM_Task, 'duration_secs': 0.282498} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.850500] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Reconfigured VM instance instance-00000046 to attach disk [datastore1] 14c911d6-44c2-4c56-a027-3d25a1e58bcc/14c911d6-44c2-4c56-a027-3d25a1e58bcc.vmdk or device None with type streamOptimized {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1600.851384] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6797f842-3214-46b1-bb94-c3a234ea9dfb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.860765] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Instance VIF info [] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1600.866254] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Creating folder: Project (e229377f36e4445084f899e6340f918a). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1600.867398] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e68c016-bc04-43c0-88a6-6e28eec5b49c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.868919] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1600.868919] env[62508]: value = "task-1776120" [ 1600.868919] env[62508]: _type = "Task" [ 1600.868919] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.876964] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776120, 'name': Rename_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.878496] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Created folder: Project (e229377f36e4445084f899e6340f918a) in parent group-v368536. [ 1600.878632] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Creating folder: Instances. Parent ref: group-v368734. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1600.878843] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d729e5c1-ac63-413b-a8ea-4f9fee8fe299 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.887712] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Created folder: Instances in parent group-v368734. [ 1600.887954] env[62508]: DEBUG oslo.service.loopingcall [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1600.888580] env[62508]: DEBUG nova.network.neutron [req-970749c8-5556-4802-a379-f29a7a0321dc req-0d82a6c3-0fae-4517-86eb-9365beca8265 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Updated VIF entry in instance network info cache for port bd3f776a-1eed-4e8d-b7f3-d958db372a2f. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1600.888908] env[62508]: DEBUG nova.network.neutron [req-970749c8-5556-4802-a379-f29a7a0321dc req-0d82a6c3-0fae-4517-86eb-9365beca8265 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Updating instance_info_cache with network_info: [{"id": "bd3f776a-1eed-4e8d-b7f3-d958db372a2f", "address": "fa:16:3e:c0:6a:f0", "network": {"id": "c8fc62e6-749b-4f96-8d05-8664390ef76f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1342046586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9b1180071bc4cc2a419daac2f51e3f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d829efb7-e98e-4b67-bd03-b0888287dbfd", "external-id": "nsx-vlan-transportzone-128", "segmentation_id": 128, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd3f776a-1e", "ovs_interfaceid": "bd3f776a-1eed-4e8d-b7f3-d958db372a2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1600.890021] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1600.890242] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-65c2c9eb-58b0-43e4-be5f-7978dee0d035 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.907035] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1600.907035] env[62508]: value = "task-1776123" [ 1600.907035] env[62508]: _type = "Task" [ 1600.907035] env[62508]: } to 
complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.914405] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776123, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.052070] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d17b0134-9e41-4671-a49a-a34df9610594" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1601.052325] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Processing image d17b0134-9e41-4671-a49a-a34df9610594 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1601.052557] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d17b0134-9e41-4671-a49a-a34df9610594/d17b0134-9e41-4671-a49a-a34df9610594.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1601.052705] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d17b0134-9e41-4671-a49a-a34df9610594/d17b0134-9e41-4671-a49a-a34df9610594.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1601.052880] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1601.053179] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ea4443a2-ca58-4b6f-b576-5f4ee47566c7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.064356] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1601.064608] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1601.065465] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d8668b6-a7c2-4f5e-9515-2c2143c958dd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.070303] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1601.070303] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e8e007-6645-62ee-7515-cc2e332a0629" [ 1601.070303] env[62508]: _type = "Task" [ 1601.070303] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.077578] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e8e007-6645-62ee-7515-cc2e332a0629, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.226047] env[62508]: DEBUG oslo_concurrency.lockutils [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.226607] env[62508]: DEBUG nova.compute.manager [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1601.229837] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.192s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.231407] env[62508]: INFO nova.compute.claims [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1601.379572] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776120, 'name': Rename_Task, 'duration_secs': 0.161893} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.379836] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1601.380105] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a1c968c4-1cb5-4426-a546-a3160b6c48e4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.386456] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1601.386456] env[62508]: value = "task-1776124" [ 1601.386456] env[62508]: _type = "Task" [ 1601.386456] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.394034] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776124, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.403301] env[62508]: DEBUG oslo_concurrency.lockutils [req-970749c8-5556-4802-a379-f29a7a0321dc req-0d82a6c3-0fae-4517-86eb-9365beca8265 service nova] Releasing lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1601.415971] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776123, 'name': CreateVM_Task, 'duration_secs': 0.365528} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.416178] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1601.416581] env[62508]: DEBUG oslo_concurrency.lockutils [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1601.416741] env[62508]: DEBUG oslo_concurrency.lockutils [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1601.417068] env[62508]: DEBUG oslo_concurrency.lockutils [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1601.417323] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d59d40fb-0a08-4947-a6b8-d99b560b1be9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.421637] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1601.421637] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5205d66f-7bf7-f425-88e9-1c0c0551ba05" [ 1601.421637] env[62508]: _type = "Task" [ 1601.421637] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.429291] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5205d66f-7bf7-f425-88e9-1c0c0551ba05, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.580854] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Preparing fetch location {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1601.581179] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Fetch image to [datastore1] OSTACK_IMG_34cc08b3-b85f-4fbb-8985-78df020ed76f/OSTACK_IMG_34cc08b3-b85f-4fbb-8985-78df020ed76f.vmdk {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1601.581392] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Downloading stream optimized image d17b0134-9e41-4671-a49a-a34df9610594 to [datastore1] OSTACK_IMG_34cc08b3-b85f-4fbb-8985-78df020ed76f/OSTACK_IMG_34cc08b3-b85f-4fbb-8985-78df020ed76f.vmdk on the data store datastore1 as vApp {{(pid=62508) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1601.581548] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Downloading image file data d17b0134-9e41-4671-a49a-a34df9610594 to the ESX as VM named 'OSTACK_IMG_34cc08b3-b85f-4fbb-8985-78df020ed76f' {{(pid=62508) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1601.653756] env[62508]: DEBUG oslo_vmware.rw_handles [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1601.653756] env[62508]: value = "resgroup-9" [ 1601.653756] env[62508]: _type = "ResourcePool" [ 1601.653756] env[62508]: }. {{(pid=62508) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1601.653756] env[62508]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-0f0525b4-c1ee-493f-8489-655b6fa1b4e2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.675178] env[62508]: DEBUG oslo_vmware.rw_handles [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lease: (returnval){ [ 1601.675178] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528fb748-68fb-e8cb-6b21-897f860e019f" [ 1601.675178] env[62508]: _type = "HttpNfcLease" [ 1601.675178] env[62508]: } obtained for vApp import into resource pool (val){ [ 1601.675178] env[62508]: value = "resgroup-9" [ 1601.675178] env[62508]: _type = "ResourcePool" [ 1601.675178] env[62508]: }. 
{{(pid=62508) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1601.675772] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the lease: (returnval){ [ 1601.675772] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528fb748-68fb-e8cb-6b21-897f860e019f" [ 1601.675772] env[62508]: _type = "HttpNfcLease" [ 1601.675772] env[62508]: } to be ready. {{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1601.681493] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1601.681493] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528fb748-68fb-e8cb-6b21-897f860e019f" [ 1601.681493] env[62508]: _type = "HttpNfcLease" [ 1601.681493] env[62508]: } is initializing. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1601.732363] env[62508]: DEBUG nova.compute.utils [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1601.734084] env[62508]: DEBUG nova.compute.manager [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Not allocating networking since 'none' was specified. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1601.897182] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776124, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.933066] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5205d66f-7bf7-f425-88e9-1c0c0551ba05, 'name': SearchDatastore_Task, 'duration_secs': 0.00978} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.933066] env[62508]: DEBUG oslo_concurrency.lockutils [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1601.933066] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1601.933349] env[62508]: DEBUG oslo_concurrency.lockutils [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1601.933506] env[62508]: DEBUG oslo_concurrency.lockutils [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1601.933689] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1601.934049] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-28a81253-8584-44df-841d-c9b894ae3c34 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.942428] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1601.942428] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1601.942670] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be70b2c0-4d4a-49e0-9e1d-5bc522808065 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.948275] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1601.948275] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5222bc42-7414-6770-6590-f66206e41d01" [ 1601.948275] env[62508]: _type = "Task" [ 1601.948275] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.958714] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5222bc42-7414-6770-6590-f66206e41d01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.183825] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1602.183825] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528fb748-68fb-e8cb-6b21-897f860e019f" [ 1602.183825] env[62508]: _type = "HttpNfcLease" [ 1602.183825] env[62508]: } is initializing. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1602.235798] env[62508]: DEBUG nova.compute.manager [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1602.325561] env[62508]: DEBUG nova.compute.manager [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1602.326982] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b47ce9d-db8e-40b2-8aa4-af5cd2a7a891 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.398414] env[62508]: DEBUG oslo_vmware.api [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776124, 'name': PowerOnVM_Task, 'duration_secs': 0.522827} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.398681] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1602.398868] env[62508]: INFO nova.compute.manager [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Took 16.65 seconds to spawn the instance on the hypervisor. [ 1602.399054] env[62508]: DEBUG nova.compute.manager [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1602.399810] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-505f1372-a003-4f85-8c31-21197e808e90 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.458250] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5222bc42-7414-6770-6590-f66206e41d01, 'name': SearchDatastore_Task, 'duration_secs': 0.009872} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.461315] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad164e35-c761-4e0f-8771-0344c0cbab72 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.467024] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1602.467024] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52338edd-9820-e79a-4589-ba00fb066bfa" [ 1602.467024] env[62508]: _type = "Task" [ 1602.467024] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.474909] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52338edd-9820-e79a-4589-ba00fb066bfa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.547977] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c89ad4-9e61-41f8-a30f-4946b4a24251 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.555917] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2878f54-7157-42fe-b3cc-c91b0a313178 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.587190] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3580df0c-cba2-4648-8182-e9eda1c079c8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.594725] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a8bdb17-061f-489e-b65b-60ff1608c1e6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.608480] env[62508]: DEBUG nova.compute.provider_tree [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1602.684327] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1602.684327] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528fb748-68fb-e8cb-6b21-897f860e019f" [ 1602.684327] env[62508]: _type = "HttpNfcLease" [ 1602.684327] env[62508]: } is ready. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1602.684708] env[62508]: DEBUG oslo_vmware.rw_handles [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1602.684708] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528fb748-68fb-e8cb-6b21-897f860e019f" [ 1602.684708] env[62508]: _type = "HttpNfcLease" [ 1602.684708] env[62508]: }. {{(pid=62508) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1602.685524] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2190c421-3fad-48b2-a14c-319226c35553 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.692744] env[62508]: DEBUG oslo_vmware.rw_handles [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b71b71-a542-dbd6-ffb1-02c96c6040c1/disk-0.vmdk from lease info. 
{{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1602.692900] env[62508]: DEBUG oslo_vmware.rw_handles [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Creating HTTP connection to write to file with size = 31591936 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b71b71-a542-dbd6-ffb1-02c96c6040c1/disk-0.vmdk. {{(pid=62508) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1602.758089] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d343d8d4-2fb4-4d28-a423-97fdea51b513 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.838893] env[62508]: INFO nova.compute.manager [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] instance snapshotting [ 1602.839549] env[62508]: DEBUG nova.objects.instance [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lazy-loading 'flavor' on Instance uuid e478855d-e9c7-4abc-8e22-a4b2eb0c7310 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1602.918024] env[62508]: INFO nova.compute.manager [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Took 27.84 seconds to build instance. [ 1602.978645] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52338edd-9820-e79a-4589-ba00fb066bfa, 'name': SearchDatastore_Task, 'duration_secs': 0.009466} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.979903] env[62508]: DEBUG oslo_concurrency.lockutils [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1602.980191] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a72fd09e-a3be-486a-a03b-8c25b04d82d0/a72fd09e-a3be-486a-a03b-8c25b04d82d0.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1602.980487] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-64cef9c2-94e6-4484-8b5f-8dad4c2d14ae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.987396] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1602.987396] env[62508]: value = "task-1776126" [ 1602.987396] env[62508]: _type = "Task" [ 1602.987396] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.995406] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776126, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.111815] env[62508]: DEBUG nova.scheduler.client.report [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1603.256532] env[62508]: DEBUG nova.compute.manager [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1603.281287] env[62508]: DEBUG nova.virt.hardware [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1603.281673] env[62508]: DEBUG nova.virt.hardware [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1603.281872] env[62508]: DEBUG nova.virt.hardware [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1603.282155] env[62508]: DEBUG nova.virt.hardware [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1603.282334] env[62508]: DEBUG nova.virt.hardware [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1603.282534] env[62508]: DEBUG nova.virt.hardware [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1603.282778] env[62508]: DEBUG nova.virt.hardware [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1603.282994] env[62508]: DEBUG nova.virt.hardware [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1603.283219] env[62508]: DEBUG nova.virt.hardware [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 
tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1603.283394] env[62508]: DEBUG nova.virt.hardware [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1603.283641] env[62508]: DEBUG nova.virt.hardware [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1603.284746] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924b3172-e631-4af4-9b1d-0ca4477c08b3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.299535] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c705fcd-128a-406b-8b33-f62dd3ed07e2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.315637] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Instance VIF info [] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1603.321730] env[62508]: DEBUG oslo.service.loopingcall [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1603.322023] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1603.322263] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-49a15bdb-6f20-4b9e-8cea-15b0bbd48d91 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.339906] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1603.339906] env[62508]: value = "task-1776127" [ 1603.339906] env[62508]: _type = "Task" [ 1603.339906] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.346796] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85930075-7bcc-411c-b182-6a6043fe57d8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.352487] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776127, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.371165] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16792d4d-cf2e-4589-a60d-7837631f6cc1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.419455] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dbd54906-8a2e-4da9-b60d-10b40d15af26 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "14c911d6-44c2-4c56-a027-3d25a1e58bcc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.359s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.500928] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776126, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.528130] env[62508]: DEBUG oslo_concurrency.lockutils [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "14c911d6-44c2-4c56-a027-3d25a1e58bcc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.528505] env[62508]: DEBUG oslo_concurrency.lockutils [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "14c911d6-44c2-4c56-a027-3d25a1e58bcc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1603.528767] env[62508]: DEBUG oslo_concurrency.lockutils [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "14c911d6-44c2-4c56-a027-3d25a1e58bcc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.528961] env[62508]: DEBUG oslo_concurrency.lockutils [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "14c911d6-44c2-4c56-a027-3d25a1e58bcc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1603.529145] env[62508]: DEBUG oslo_concurrency.lockutils [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "14c911d6-44c2-4c56-a027-3d25a1e58bcc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.533195] env[62508]: INFO nova.compute.manager [None req-93de380a-6940-4700-aa05-7887629bfdd8 
tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Terminating instance [ 1603.535205] env[62508]: DEBUG nova.compute.manager [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1603.535456] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1603.536378] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b5b141-d59a-41ef-9a17-11ef96f2de9f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.546061] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1603.546298] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d143d07f-2793-4f64-adce-4693449a50bd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.554629] env[62508]: DEBUG oslo_vmware.api [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1603.554629] env[62508]: value = "task-1776128" [ 1603.554629] env[62508]: _type = "Task" [ 1603.554629] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.565472] env[62508]: DEBUG oslo_vmware.api [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776128, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.618556] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.389s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.619303] env[62508]: DEBUG nova.compute.manager [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1603.626125] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.151s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1603.627902] env[62508]: INFO nova.compute.claims [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1603.850311] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776127, 'name': CreateVM_Task, 'duration_secs': 0.379037} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.850462] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1603.850927] env[62508]: DEBUG oslo_concurrency.lockutils [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1603.851111] env[62508]: DEBUG oslo_concurrency.lockutils [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1603.851433] env[62508]: DEBUG oslo_concurrency.lockutils [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1603.851709] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-315e1f1d-bf3f-458c-ae43-f40efe935af5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.856707] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1603.856707] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d89f9f-5425-7eab-4a55-47ea043faa6a" [ 1603.856707] env[62508]: _type = "Task" [ 1603.856707] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.864421] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d89f9f-5425-7eab-4a55-47ea043faa6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.883120] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Creating Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1603.883484] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-cdb39553-dc14-46bd-9ed8-1e2a8554ad41 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.892209] env[62508]: DEBUG oslo_vmware.api [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1603.892209] env[62508]: value = "task-1776129" [ 1603.892209] env[62508]: _type = "Task" [ 1603.892209] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.900423] env[62508]: DEBUG oslo_vmware.api [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776129, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.000213] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776126, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.51456} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.000524] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a72fd09e-a3be-486a-a03b-8c25b04d82d0/a72fd09e-a3be-486a-a03b-8c25b04d82d0.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1604.000803] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1604.001018] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-03aad6b9-184b-4f74-95fe-7193285a8135 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.010696] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1604.010696] env[62508]: value = "task-1776130" [ 1604.010696] env[62508]: _type = "Task" [ 1604.010696] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.019377] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776130, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.042062] env[62508]: DEBUG oslo_vmware.rw_handles [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Completed reading data from the image iterator. {{(pid=62508) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1604.042282] env[62508]: DEBUG oslo_vmware.rw_handles [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b71b71-a542-dbd6-ffb1-02c96c6040c1/disk-0.vmdk. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1604.043195] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7788983-b081-43be-b56d-c7610f9b44ff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.049942] env[62508]: DEBUG oslo_vmware.rw_handles [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b71b71-a542-dbd6-ffb1-02c96c6040c1/disk-0.vmdk is in state: ready. 
{{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1604.050126] env[62508]: DEBUG oslo_vmware.rw_handles [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b71b71-a542-dbd6-ffb1-02c96c6040c1/disk-0.vmdk. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1604.050358] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-c782763b-fe5c-406d-90e7-088d062fe10a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.062979] env[62508]: DEBUG oslo_vmware.api [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776128, 'name': PowerOffVM_Task, 'duration_secs': 0.197758} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.063274] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1604.063445] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1604.063680] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ee82980-41a4-4bbf-92d1-9d10938eae10 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.133038] env[62508]: DEBUG nova.compute.utils [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1604.136893] env[62508]: DEBUG nova.compute.manager [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1604.137040] env[62508]: DEBUG nova.network.neutron [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1604.180194] env[62508]: DEBUG nova.policy [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf74c20248784c3ca734e528856f21f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce3b480c3c81499599aef114f92775cd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1604.244746] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1604.244966] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1604.245178] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Deleting the datastore file [datastore1] 14c911d6-44c2-4c56-a027-3d25a1e58bcc {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1604.245480] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c4a61aaa-394d-4ee6-869c-48ad126ba73a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.252216] env[62508]: DEBUG oslo_vmware.api [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1604.252216] env[62508]: value = "task-1776132" [ 1604.252216] env[62508]: _type = "Task" [ 1604.252216] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.260139] env[62508]: DEBUG oslo_vmware.api [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776132, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.270763] env[62508]: DEBUG oslo_vmware.rw_handles [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b71b71-a542-dbd6-ffb1-02c96c6040c1/disk-0.vmdk. {{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1604.271168] env[62508]: INFO nova.virt.vmwareapi.images [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Downloaded image file data d17b0134-9e41-4671-a49a-a34df9610594 [ 1604.271917] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a42c59-5a45-4208-b1f5-e6f0a419caac {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.290991] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ecd8831-52d2-4a5f-8e77-8228d4d2fc1c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.314503] env[62508]: INFO nova.virt.vmwareapi.images [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] The imported VM was unregistered [ 1604.316776] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Caching image {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1604.317019] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Creating directory with path [datastore1] devstack-image-cache_base/d17b0134-9e41-4671-a49a-a34df9610594 {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1604.317359] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af4726e6-8531-4da5-8139-2783b8ad5d91 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.327932] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Created directory with path [datastore1] devstack-image-cache_base/d17b0134-9e41-4671-a49a-a34df9610594 {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1604.328181] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_34cc08b3-b85f-4fbb-8985-78df020ed76f/OSTACK_IMG_34cc08b3-b85f-4fbb-8985-78df020ed76f.vmdk to [datastore1] devstack-image-cache_base/d17b0134-9e41-4671-a49a-a34df9610594/d17b0134-9e41-4671-a49a-a34df9610594.vmdk. 
{{(pid=62508) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1604.328464] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-28a39a64-88db-4d79-bf51-1d2dcc33a657 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.336025] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1604.336025] env[62508]: value = "task-1776134" [ 1604.336025] env[62508]: _type = "Task" [ 1604.336025] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.345092] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776134, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.367486] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d89f9f-5425-7eab-4a55-47ea043faa6a, 'name': SearchDatastore_Task, 'duration_secs': 0.010657} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.367990] env[62508]: DEBUG oslo_concurrency.lockutils [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1604.368375] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1604.368717] env[62508]: DEBUG oslo_concurrency.lockutils [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1604.368964] env[62508]: DEBUG oslo_concurrency.lockutils [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1604.369252] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1604.369604] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eb17c3b7-9cb7-41b2-b2c2-2dacda75e7d2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.390306] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1604.390306] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1604.390306] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0d15e38-641a-4914-9313-3c96e8d3adc1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.402776] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1604.402776] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528b9caf-551e-a9e0-e43a-10b7e85122f0" [ 1604.402776] env[62508]: _type = "Task" [ 1604.402776] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.407249] env[62508]: DEBUG oslo_vmware.api [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776129, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.416637] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528b9caf-551e-a9e0-e43a-10b7e85122f0, 'name': SearchDatastore_Task, 'duration_secs': 0.008537} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.417628] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39e47bd9-53dd-4ec2-8c75-ac62902b84d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.426030] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1604.426030] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5226bb37-547d-ed46-65ea-fa8725ffa748" [ 1604.426030] env[62508]: _type = "Task" [ 1604.426030] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.431270] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5226bb37-547d-ed46-65ea-fa8725ffa748, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.486802] env[62508]: DEBUG nova.network.neutron [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Successfully created port: 8570ede3-d3fc-41d9-90a0-3dc1ef777446 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1604.520401] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776130, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069108} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.520685] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1604.521514] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9505ba97-0535-40dc-9216-da63629d2f6a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.542148] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] a72fd09e-a3be-486a-a03b-8c25b04d82d0/a72fd09e-a3be-486a-a03b-8c25b04d82d0.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1604.542485] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-725374d1-6a67-4b1b-8359-98385a34d31c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.563891] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1604.563891] env[62508]: value = "task-1776135" [ 1604.563891] env[62508]: _type = "Task" [ 1604.563891] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.574164] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776135, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.638208] env[62508]: DEBUG nova.compute.manager [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1604.765026] env[62508]: DEBUG oslo_vmware.api [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776132, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146976} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.765346] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1604.765533] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1604.765707] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1604.765878] env[62508]: INFO nova.compute.manager [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1604.769944] env[62508]: DEBUG oslo.service.loopingcall [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1604.769944] env[62508]: DEBUG nova.compute.manager [-] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1604.769944] env[62508]: DEBUG nova.network.neutron [-] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1604.852538] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776134, 'name': MoveVirtualDisk_Task} progress is 18%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.903862] env[62508]: DEBUG oslo_vmware.api [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776129, 'name': CreateSnapshot_Task, 'duration_secs': 0.61066} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.906978] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Created Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1604.908438] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb639c2-331f-4e1f-bb0d-8a4ce9159e7b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.939705] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5226bb37-547d-ed46-65ea-fa8725ffa748, 'name': SearchDatastore_Task, 'duration_secs': 0.008694} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.943940] env[62508]: DEBUG oslo_concurrency.lockutils [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1604.943940] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 4cc6d0f4-413a-44e1-850f-da499f582d15/4cc6d0f4-413a-44e1-850f-da499f582d15.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1604.943940] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b36190cc-7c65-4227-9dbe-49751058c0e5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.952783] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1604.952783] env[62508]: value = "task-1776136" [ 1604.952783] env[62508]: _type = "Task" [ 1604.952783] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.965250] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776136, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.031293] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec40cd60-6f6e-4412-a3c6-e57a3174bde4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.039469] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-862e8a9e-305f-477e-9abd-6ae1c6fb2953 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.077032] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c381227a-c3ad-47f2-bc7c-27d470afe9f1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.083353] env[62508]: DEBUG nova.compute.manager [req-a9b88f49-4aff-4aca-a96a-37c84b7eef7d req-4b99ce9e-ea75-4672-8c3a-023368a334fd service nova] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Received event network-vif-deleted-e32abf3b-06cd-43e5-a59b-616b6d7d3ff2 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1605.083353] env[62508]: INFO nova.compute.manager [req-a9b88f49-4aff-4aca-a96a-37c84b7eef7d req-4b99ce9e-ea75-4672-8c3a-023368a334fd service nova] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Neutron deleted interface e32abf3b-06cd-43e5-a59b-616b6d7d3ff2; detaching it from the instance and deleting it from the info cache [ 1605.084062] env[62508]: DEBUG nova.network.neutron [req-a9b88f49-4aff-4aca-a96a-37c84b7eef7d req-4b99ce9e-ea75-4672-8c3a-023368a334fd service nova] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1605.090273] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776135, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.092299] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-604c7d84-5073-4b6e-911e-b8a929dd9e18 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.108934] env[62508]: DEBUG nova.compute.provider_tree [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1605.348181] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776134, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.434673] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Creating linked-clone VM from snapshot {{(pid=62508) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1605.435079] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-903b4afd-5590-4747-9a8f-42ac7e593762 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.444721] env[62508]: DEBUG oslo_vmware.api [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1605.444721] env[62508]: value = "task-1776137" [ 1605.444721] env[62508]: _type = "Task" [ 1605.444721] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.454696] env[62508]: DEBUG oslo_vmware.api [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776137, 'name': CloneVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.462700] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776136, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.582895] env[62508]: DEBUG nova.network.neutron [-] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1605.583746] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776135, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.593412] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7b06f472-6174-4cbb-af5a-0d1d184ba83a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.603936] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25615e0c-1fc0-4e96-9071-656eb9ff7e57 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.617484] env[62508]: DEBUG nova.scheduler.client.report [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1605.647208] env[62508]: DEBUG nova.compute.manager [req-a9b88f49-4aff-4aca-a96a-37c84b7eef7d req-4b99ce9e-ea75-4672-8c3a-023368a334fd service nova] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Detach interface failed, port_id=e32abf3b-06cd-43e5-a59b-616b6d7d3ff2, reason: Instance 14c911d6-44c2-4c56-a027-3d25a1e58bcc could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1605.650719] env[62508]: DEBUG nova.compute.manager [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1605.679458] env[62508]: DEBUG nova.virt.hardware [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1605.679800] env[62508]: DEBUG nova.virt.hardware [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1605.679882] env[62508]: DEBUG nova.virt.hardware [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1605.680037] env[62508]: DEBUG nova.virt.hardware [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1605.680198] env[62508]: DEBUG nova.virt.hardware [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1605.680350] env[62508]: DEBUG nova.virt.hardware [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1605.680578] env[62508]: DEBUG nova.virt.hardware [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1605.680734] env[62508]: DEBUG nova.virt.hardware [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1605.680904] env[62508]: DEBUG nova.virt.hardware [None 
req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1605.681074] env[62508]: DEBUG nova.virt.hardware [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1605.681256] env[62508]: DEBUG nova.virt.hardware [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1605.682173] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5dbab91-5c81-4125-a60a-c5cb2b59fee9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.690943] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067931ef-7087-40b0-97f2-3b9713fa683c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.848825] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776134, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.963022] env[62508]: DEBUG oslo_vmware.api [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776137, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.972427] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776136, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.979479] env[62508]: DEBUG nova.compute.manager [req-7c348fca-6d00-4a5a-857d-faa2df80bcac req-4ca5c90a-a539-4006-8d9a-97a9abb664b3 service nova] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Received event network-vif-plugged-8570ede3-d3fc-41d9-90a0-3dc1ef777446 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1605.979479] env[62508]: DEBUG oslo_concurrency.lockutils [req-7c348fca-6d00-4a5a-857d-faa2df80bcac req-4ca5c90a-a539-4006-8d9a-97a9abb664b3 service nova] Acquiring lock "bee2cc61-b26c-4d2d-a2aa-ec79b8678e32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.979479] env[62508]: DEBUG oslo_concurrency.lockutils [req-7c348fca-6d00-4a5a-857d-faa2df80bcac req-4ca5c90a-a539-4006-8d9a-97a9abb664b3 service nova] Lock "bee2cc61-b26c-4d2d-a2aa-ec79b8678e32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.979479] env[62508]: DEBUG oslo_concurrency.lockutils [req-7c348fca-6d00-4a5a-857d-faa2df80bcac req-4ca5c90a-a539-4006-8d9a-97a9abb664b3 service nova] Lock "bee2cc61-b26c-4d2d-a2aa-ec79b8678e32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1605.979479] env[62508]: DEBUG nova.compute.manager [req-7c348fca-6d00-4a5a-857d-faa2df80bcac req-4ca5c90a-a539-4006-8d9a-97a9abb664b3 service nova] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] No waiting events found dispatching network-vif-plugged-8570ede3-d3fc-41d9-90a0-3dc1ef777446 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1605.979479] env[62508]: WARNING nova.compute.manager [req-7c348fca-6d00-4a5a-857d-faa2df80bcac req-4ca5c90a-a539-4006-8d9a-97a9abb664b3 service nova] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Received unexpected event network-vif-plugged-8570ede3-d3fc-41d9-90a0-3dc1ef777446 for instance with vm_state building and task_state spawning. [ 1606.083614] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776135, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.084852] env[62508]: INFO nova.compute.manager [-] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Took 1.32 seconds to deallocate network for instance. 
[ 1606.091198] env[62508]: DEBUG nova.network.neutron [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Successfully updated port: 8570ede3-d3fc-41d9-90a0-3dc1ef777446 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1606.121974] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.496s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1606.122598] env[62508]: DEBUG nova.compute.manager [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1606.126296] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.012s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1606.126577] env[62508]: DEBUG nova.objects.instance [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lazy-loading 'resources' on Instance uuid a9b92a6d-154c-42bb-842c-bc42a07299a0 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1606.350025] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776134, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.458710] env[62508]: DEBUG oslo_vmware.api [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776137, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.473237] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776136, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.583501] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776135, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.595280] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "refresh_cache-bee2cc61-b26c-4d2d-a2aa-ec79b8678e32" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1606.595504] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired lock "refresh_cache-bee2cc61-b26c-4d2d-a2aa-ec79b8678e32" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1606.595672] env[62508]: DEBUG nova.network.neutron [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1606.597640] env[62508]: DEBUG oslo_concurrency.lockutils [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1606.627959] env[62508]: DEBUG nova.compute.utils [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1606.629423] env[62508]: DEBUG nova.compute.manager [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1606.629562] env[62508]: DEBUG nova.network.neutron [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1606.675776] env[62508]: DEBUG nova.policy [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf74c20248784c3ca734e528856f21f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce3b480c3c81499599aef114f92775cd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1606.850460] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776134, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.952847] env[62508]: DEBUG nova.network.neutron [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Successfully created port: 934ffcfc-e193-4a25-9167-be27718f24af {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1606.964477] env[62508]: DEBUG oslo_vmware.api [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776137, 'name': CloneVM_Task} progress is 95%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.977300] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776136, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.003011] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0935ed1a-442d-4bb9-93c2-19ba2eaffbeb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.011034] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-901419b9-a032-430d-b262-f3ec745f0d78 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.044064] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb993349-45bd-473c-8676-69e6bfcfc934 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.052758] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed98693-efc0-4aa2-ab7f-e40b03bf8896 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.066910] env[62508]: DEBUG nova.compute.provider_tree [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1607.081044] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776135, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.132771] env[62508]: DEBUG nova.network.neutron [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1607.135481] env[62508]: DEBUG nova.compute.manager [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1607.293813] env[62508]: DEBUG nova.network.neutron [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Updating instance_info_cache with network_info: [{"id": "8570ede3-d3fc-41d9-90a0-3dc1ef777446", "address": "fa:16:3e:00:82:71", "network": {"id": "29c40f34-e678-48f1-94f4-d128bc6dfe71", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1416755499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce3b480c3c81499599aef114f92775cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8570ede3-d3", "ovs_interfaceid": "8570ede3-d3fc-41d9-90a0-3dc1ef777446", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1607.349907] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776134, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.956313} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.350199] env[62508]: INFO nova.virt.vmwareapi.ds_util [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_34cc08b3-b85f-4fbb-8985-78df020ed76f/OSTACK_IMG_34cc08b3-b85f-4fbb-8985-78df020ed76f.vmdk to [datastore1] devstack-image-cache_base/d17b0134-9e41-4671-a49a-a34df9610594/d17b0134-9e41-4671-a49a-a34df9610594.vmdk. 
[ 1607.350386] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Cleaning up location [datastore1] OSTACK_IMG_34cc08b3-b85f-4fbb-8985-78df020ed76f {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1607.350549] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_34cc08b3-b85f-4fbb-8985-78df020ed76f {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1607.350824] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9347883e-6581-4d4f-9e67-f67eca2e5b1f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.357785] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1607.357785] env[62508]: value = "task-1776138" [ 1607.357785] env[62508]: _type = "Task" [ 1607.357785] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.366302] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776138, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.457893] env[62508]: DEBUG oslo_vmware.api [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776137, 'name': CloneVM_Task, 'duration_secs': 1.621763} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.458177] env[62508]: INFO nova.virt.vmwareapi.vmops [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Created linked-clone VM from snapshot [ 1607.458963] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3895198-85f9-4d2c-b2ff-5ced43957b9d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.466287] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Uploading image 57b290e8-da3a-4e9f-9233-d8f772b973bf {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1607.475900] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776136, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.349156} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.476162] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 4cc6d0f4-413a-44e1-850f-da499f582d15/4cc6d0f4-413a-44e1-850f-da499f582d15.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1607.476394] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1607.476648] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-12c424e7-a985-4b51-8434-7a79e2d0a251 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.482992] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1607.482992] env[62508]: value = "task-1776139" [ 1607.482992] env[62508]: _type = "Task" [ 1607.482992] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.488129] env[62508]: DEBUG oslo_vmware.rw_handles [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1607.488129] env[62508]: value = "vm-368740" [ 1607.488129] env[62508]: _type = "VirtualMachine" [ 1607.488129] env[62508]: }. 
{{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1607.488370] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e8c142ad-bf57-425e-b873-6976e776ca87 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.492852] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776139, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.498276] env[62508]: DEBUG oslo_vmware.rw_handles [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lease: (returnval){ [ 1607.498276] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5205308b-2d64-2a54-57bc-860e5017a118" [ 1607.498276] env[62508]: _type = "HttpNfcLease" [ 1607.498276] env[62508]: } obtained for exporting VM: (result){ [ 1607.498276] env[62508]: value = "vm-368740" [ 1607.498276] env[62508]: _type = "VirtualMachine" [ 1607.498276] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1607.498570] env[62508]: DEBUG oslo_vmware.api [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the lease: (returnval){ [ 1607.498570] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5205308b-2d64-2a54-57bc-860e5017a118" [ 1607.498570] env[62508]: _type = "HttpNfcLease" [ 1607.498570] env[62508]: } to be ready. {{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1607.505388] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1607.505388] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5205308b-2d64-2a54-57bc-860e5017a118" [ 1607.505388] env[62508]: _type = "HttpNfcLease" [ 1607.505388] env[62508]: } is initializing. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1607.572472] env[62508]: DEBUG nova.scheduler.client.report [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1607.585080] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776135, 'name': ReconfigVM_Task, 'duration_secs': 2.734423} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.585426] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Reconfigured VM instance instance-00000047 to attach disk [datastore1] a72fd09e-a3be-486a-a03b-8c25b04d82d0/a72fd09e-a3be-486a-a03b-8c25b04d82d0.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1607.586044] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-46c2c3f7-7a0b-432e-8d2f-2b9749f6ed61 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.593239] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1607.593239] env[62508]: value = "task-1776141" [ 1607.593239] env[62508]: _type = "Task" [ 1607.593239] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.601564] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776141, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.796677] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Releasing lock "refresh_cache-bee2cc61-b26c-4d2d-a2aa-ec79b8678e32" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1607.797237] env[62508]: DEBUG nova.compute.manager [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Instance network_info: |[{"id": "8570ede3-d3fc-41d9-90a0-3dc1ef777446", "address": "fa:16:3e:00:82:71", "network": {"id": "29c40f34-e678-48f1-94f4-d128bc6dfe71", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1416755499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce3b480c3c81499599aef114f92775cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8570ede3-d3", "ovs_interfaceid": "8570ede3-d3fc-41d9-90a0-3dc1ef777446", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1607.797628] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:82:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8570ede3-d3fc-41d9-90a0-3dc1ef777446', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1607.805262] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Creating folder: Project (ce3b480c3c81499599aef114f92775cd). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1607.806226] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-038c9c8f-d6fd-402f-b26d-3fec8bd1bfc1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.816790] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Created folder: Project (ce3b480c3c81499599aef114f92775cd) in parent group-v368536. [ 1607.816965] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Creating folder: Instances. Parent ref: group-v368741. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1607.817200] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9714916b-0279-47f8-8ef1-14b9493369e8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.825874] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Created folder: Instances in parent group-v368741. [ 1607.826101] env[62508]: DEBUG oslo.service.loopingcall [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1607.826280] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1607.826477] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a0ed0125-fa11-4680-9933-17278a1487ac {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.845393] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1607.845393] env[62508]: value = "task-1776144" [ 1607.845393] env[62508]: _type = "Task" [ 1607.845393] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.853841] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776144, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.865045] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776138, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.039696} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.865341] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1607.865517] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d17b0134-9e41-4671-a49a-a34df9610594/d17b0134-9e41-4671-a49a-a34df9610594.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1607.865753] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d17b0134-9e41-4671-a49a-a34df9610594/d17b0134-9e41-4671-a49a-a34df9610594.vmdk to [datastore1] aedbd388-3ef7-410f-b0e3-5ea67ad56b65/aedbd388-3ef7-410f-b0e3-5ea67ad56b65.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1607.865968] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4a25725-bbb5-4451-ac36-907971b18798 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.871945] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1607.871945] env[62508]: value = "task-1776145" [ 1607.871945] env[62508]: _type = "Task" [ 1607.871945] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.882842] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776145, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.993062] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776139, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06646} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.993357] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1607.994122] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a839868-473f-44a3-b2aa-8d99ce26f6d0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.013818] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 4cc6d0f4-413a-44e1-850f-da499f582d15/4cc6d0f4-413a-44e1-850f-da499f582d15.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1608.017846] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e31a84d-a2aa-4bde-9e88-3637fc098e80 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.033415] env[62508]: DEBUG nova.compute.manager [req-e6d28281-e884-48d4-8655-2dd2157de811 req-1236ce76-bcfb-4969-88f8-6fd071ff3c09 service nova] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Received event network-changed-8570ede3-d3fc-41d9-90a0-3dc1ef777446 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1608.033617] env[62508]: DEBUG nova.compute.manager [req-e6d28281-e884-48d4-8655-2dd2157de811 req-1236ce76-bcfb-4969-88f8-6fd071ff3c09 service nova] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Refreshing instance network info cache due to event network-changed-8570ede3-d3fc-41d9-90a0-3dc1ef777446. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1608.033818] env[62508]: DEBUG oslo_concurrency.lockutils [req-e6d28281-e884-48d4-8655-2dd2157de811 req-1236ce76-bcfb-4969-88f8-6fd071ff3c09 service nova] Acquiring lock "refresh_cache-bee2cc61-b26c-4d2d-a2aa-ec79b8678e32" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1608.033997] env[62508]: DEBUG oslo_concurrency.lockutils [req-e6d28281-e884-48d4-8655-2dd2157de811 req-1236ce76-bcfb-4969-88f8-6fd071ff3c09 service nova] Acquired lock "refresh_cache-bee2cc61-b26c-4d2d-a2aa-ec79b8678e32" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.034222] env[62508]: DEBUG nova.network.neutron [req-e6d28281-e884-48d4-8655-2dd2157de811 req-1236ce76-bcfb-4969-88f8-6fd071ff3c09 service nova] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Refreshing network info cache for port 8570ede3-d3fc-41d9-90a0-3dc1ef777446 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1608.040375] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1608.040375] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5205308b-2d64-2a54-57bc-860e5017a118" [ 1608.040375] env[62508]: _type = "HttpNfcLease" [ 1608.040375] env[62508]: } is ready. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1608.041657] env[62508]: DEBUG oslo_vmware.rw_handles [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1608.041657] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5205308b-2d64-2a54-57bc-860e5017a118" [ 1608.041657] env[62508]: _type = "HttpNfcLease" [ 1608.041657] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1608.041943] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1608.041943] env[62508]: value = "task-1776146" [ 1608.041943] env[62508]: _type = "Task" [ 1608.041943] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.042659] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea6d19aa-f1e0-4b71-a070-a82781d55f60 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.054390] env[62508]: DEBUG oslo_vmware.rw_handles [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523c3bd3-9bed-d386-1c41-823c403677a3/disk-0.vmdk from lease info. 
{{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1608.054569] env[62508]: DEBUG oslo_vmware.rw_handles [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523c3bd3-9bed-d386-1c41-823c403677a3/disk-0.vmdk for reading. {{(pid=62508) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1608.059351] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776146, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.115496] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.989s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1608.118260] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.756s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.119858] env[62508]: INFO nova.compute.claims [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1608.133481] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776141, 'name': Rename_Task, 'duration_secs': 0.139689} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.133948] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1608.134055] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4759454c-bdf0-417d-80b5-c5dd0fa82db4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.140419] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1608.140419] env[62508]: value = "task-1776147" [ 1608.140419] env[62508]: _type = "Task" [ 1608.140419] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.141704] env[62508]: INFO nova.scheduler.client.report [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Deleted allocations for instance a9b92a6d-154c-42bb-842c-bc42a07299a0 [ 1608.149013] env[62508]: DEBUG nova.compute.manager [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1608.157539] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776147, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.171770] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f3cff224-4aae-4764-8c92-13e6304b55ac {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.181254] env[62508]: DEBUG nova.virt.hardware [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1608.181705] env[62508]: DEBUG nova.virt.hardware [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1608.181797] env[62508]: DEBUG nova.virt.hardware [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1608.181975] env[62508]: DEBUG nova.virt.hardware [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1608.182139] env[62508]: DEBUG nova.virt.hardware [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 
tempest-ServersAdminTestJSON-1341054529-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1608.182302] env[62508]: DEBUG nova.virt.hardware [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1608.182496] env[62508]: DEBUG nova.virt.hardware [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1608.182682] env[62508]: DEBUG nova.virt.hardware [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1608.182819] env[62508]: DEBUG nova.virt.hardware [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1608.182979] env[62508]: DEBUG nova.virt.hardware [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1608.183192] env[62508]: DEBUG nova.virt.hardware [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1608.184399] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9220161-d525-4dd1-9555-4d7c1c8a5896 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.193619] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053f740f-8c9a-45cf-aa74-a0baf12b5f3a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.297497] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "interface-70c8de27-4696-4005-bbec-e7a33e56311b-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.297497] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock 
"interface-70c8de27-4696-4005-bbec-e7a33e56311b-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.297497] env[62508]: DEBUG nova.objects.instance [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lazy-loading 'flavor' on Instance uuid 70c8de27-4696-4005-bbec-e7a33e56311b {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1608.355772] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776144, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.382399] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776145, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.556678] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776146, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.655720] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa2fddb3-691b-4fd2-8901-ebd3d744a3d8 tempest-ImagesOneServerNegativeTestJSON-1241665393 tempest-ImagesOneServerNegativeTestJSON-1241665393-project-member] Lock "a9b92a6d-154c-42bb-842c-bc42a07299a0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.684s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1608.666734] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776147, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.709998] env[62508]: DEBUG nova.network.neutron [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Successfully updated port: 934ffcfc-e193-4a25-9167-be27718f24af {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1608.790325] env[62508]: DEBUG nova.network.neutron [req-e6d28281-e884-48d4-8655-2dd2157de811 req-1236ce76-bcfb-4969-88f8-6fd071ff3c09 service nova] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Updated VIF entry in instance network info cache for port 8570ede3-d3fc-41d9-90a0-3dc1ef777446. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1608.790810] env[62508]: DEBUG nova.network.neutron [req-e6d28281-e884-48d4-8655-2dd2157de811 req-1236ce76-bcfb-4969-88f8-6fd071ff3c09 service nova] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Updating instance_info_cache with network_info: [{"id": "8570ede3-d3fc-41d9-90a0-3dc1ef777446", "address": "fa:16:3e:00:82:71", "network": {"id": "29c40f34-e678-48f1-94f4-d128bc6dfe71", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1416755499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce3b480c3c81499599aef114f92775cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8570ede3-d3", "ovs_interfaceid": "8570ede3-d3fc-41d9-90a0-3dc1ef777446", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1608.856526] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776144, 'name': CreateVM_Task, 'duration_secs': 0.972238} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.856727] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1608.857610] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1608.857771] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.858281] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1608.858669] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f535687-b9b6-4934-af67-70fb9ed7da4a {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.863820] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1608.863820] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a9f0cf-b2d9-50c1-4a2a-a4fc0d4241fd" [ 1608.863820] env[62508]: _type = "Task" [ 1608.863820] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.873517] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a9f0cf-b2d9-50c1-4a2a-a4fc0d4241fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.878112] env[62508]: DEBUG nova.objects.instance [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lazy-loading 'pci_requests' on Instance uuid 70c8de27-4696-4005-bbec-e7a33e56311b {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1608.882790] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776145, 'name': CopyVirtualDisk_Task} progress is 38%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.056636] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776146, 'name': ReconfigVM_Task, 'duration_secs': 0.890096} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.057100] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 4cc6d0f4-413a-44e1-850f-da499f582d15/4cc6d0f4-413a-44e1-850f-da499f582d15.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1609.057865] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5ef10e2b-bf0f-4036-a7ed-bdb304710eaa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.064556] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1609.064556] env[62508]: value = "task-1776148" [ 1609.064556] env[62508]: _type = "Task" [ 1609.064556] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.073849] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776148, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.155257] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776147, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.213269] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "refresh_cache-f465712f-f65a-4521-90ab-e9f5c5b6de5f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1609.213269] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired lock "refresh_cache-f465712f-f65a-4521-90ab-e9f5c5b6de5f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.213269] env[62508]: DEBUG nova.network.neutron [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1609.294219] env[62508]: DEBUG oslo_concurrency.lockutils [req-e6d28281-e884-48d4-8655-2dd2157de811 req-1236ce76-bcfb-4969-88f8-6fd071ff3c09 service nova] Releasing lock "refresh_cache-bee2cc61-b26c-4d2d-a2aa-ec79b8678e32" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.375611] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a9f0cf-b2d9-50c1-4a2a-a4fc0d4241fd, 'name': SearchDatastore_Task, 'duration_secs': 0.107719} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.381896] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.382196] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1609.382863] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1609.382863] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.382863] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1609.383797] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9594b077-267d-47c4-8407-5d16af98cb0e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.386788] env[62508]: DEBUG nova.objects.base [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Object Instance<70c8de27-4696-4005-bbec-e7a33e56311b> lazy-loaded attributes: flavor,pci_requests {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1609.387656] env[62508]: DEBUG nova.network.neutron [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1609.396651] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776145, 'name': CopyVirtualDisk_Task} progress is 57%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.413257] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1609.413785] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1609.417754] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92b4b3d4-724d-4e96-b255-d14c4474b521 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.428523] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1609.428523] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52628bb5-35a3-7760-2517-2b412e24968c" [ 1609.428523] env[62508]: _type = "Task" [ 1609.428523] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.438053] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52628bb5-35a3-7760-2517-2b412e24968c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.449488] env[62508]: DEBUG nova.policy [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c1793957cc840d58a1b6f1f9b38b96b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b46df14344794f29a8b0c00408d18159', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1609.535497] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b16c66bd-6089-4994-bedb-5b853c6a3c4d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.543613] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a79af17f-64e6-4ea1-b556-2a0a1ba5e3b1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.584615] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37215763-6e5f-447d-8f43-7ae43a5f4aa3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.593452] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776148, 'name': Rename_Task, 'duration_secs': 0.375131} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.596376] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1609.596929] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9938475d-f023-4bff-8c6b-87d20235ba2c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.599908] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec50ccd-a855-4442-8f5d-c54df591f69d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.615815] env[62508]: DEBUG nova.compute.provider_tree [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1609.619163] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1609.619163] env[62508]: value = "task-1776149" [ 1609.619163] env[62508]: _type = "Task" [ 1609.619163] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.633928] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776149, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.654626] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776147, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.749023] env[62508]: DEBUG nova.network.neutron [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1609.868167] env[62508]: DEBUG nova.network.neutron [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Successfully created port: 3f15fe5b-6f5a-4889-8452-9da44feeaab9 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1609.886486] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776145, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.934887] env[62508]: DEBUG nova.network.neutron [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Updating instance_info_cache with network_info: [{"id": "934ffcfc-e193-4a25-9167-be27718f24af", "address": "fa:16:3e:5a:05:95", "network": {"id": "29c40f34-e678-48f1-94f4-d128bc6dfe71", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1416755499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce3b480c3c81499599aef114f92775cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap934ffcfc-e1", "ovs_interfaceid": "934ffcfc-e193-4a25-9167-be27718f24af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1609.943043] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52628bb5-35a3-7760-2517-2b412e24968c, 'name': SearchDatastore_Task, 'duration_secs': 0.085973} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.944074] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-312a7dc6-f331-4445-ac1c-b0fb4cb00cc0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.950915] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1609.950915] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52bee857-4ff2-29f7-1026-cf3e5455cb02" [ 1609.950915] env[62508]: _type = "Task" [ 1609.950915] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.960669] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52bee857-4ff2-29f7-1026-cf3e5455cb02, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.035489] env[62508]: DEBUG nova.compute.manager [req-d4b70a5c-9801-4437-886d-d79ee3677a27 req-a4510ea3-d402-4f2e-8eaf-166025002b50 service nova] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Received event network-vif-plugged-934ffcfc-e193-4a25-9167-be27718f24af {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1610.035744] env[62508]: DEBUG oslo_concurrency.lockutils [req-d4b70a5c-9801-4437-886d-d79ee3677a27 req-a4510ea3-d402-4f2e-8eaf-166025002b50 service nova] Acquiring lock "f465712f-f65a-4521-90ab-e9f5c5b6de5f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1610.035971] env[62508]: DEBUG oslo_concurrency.lockutils [req-d4b70a5c-9801-4437-886d-d79ee3677a27 req-a4510ea3-d402-4f2e-8eaf-166025002b50 service nova] Lock "f465712f-f65a-4521-90ab-e9f5c5b6de5f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1610.036497] env[62508]: DEBUG oslo_concurrency.lockutils [req-d4b70a5c-9801-4437-886d-d79ee3677a27 req-a4510ea3-d402-4f2e-8eaf-166025002b50 service nova] Lock "f465712f-f65a-4521-90ab-e9f5c5b6de5f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1610.036688] env[62508]: DEBUG nova.compute.manager [req-d4b70a5c-9801-4437-886d-d79ee3677a27 req-a4510ea3-d402-4f2e-8eaf-166025002b50 service nova] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] No waiting events found dispatching network-vif-plugged-934ffcfc-e193-4a25-9167-be27718f24af {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1610.036874] env[62508]: WARNING nova.compute.manager [req-d4b70a5c-9801-4437-886d-d79ee3677a27 req-a4510ea3-d402-4f2e-8eaf-166025002b50 service nova] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Received unexpected event 
network-vif-plugged-934ffcfc-e193-4a25-9167-be27718f24af for instance with vm_state building and task_state spawning. [ 1610.037123] env[62508]: DEBUG nova.compute.manager [req-d4b70a5c-9801-4437-886d-d79ee3677a27 req-a4510ea3-d402-4f2e-8eaf-166025002b50 service nova] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Received event network-changed-934ffcfc-e193-4a25-9167-be27718f24af {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1610.037307] env[62508]: DEBUG nova.compute.manager [req-d4b70a5c-9801-4437-886d-d79ee3677a27 req-a4510ea3-d402-4f2e-8eaf-166025002b50 service nova] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Refreshing instance network info cache due to event network-changed-934ffcfc-e193-4a25-9167-be27718f24af. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1610.038236] env[62508]: DEBUG oslo_concurrency.lockutils [req-d4b70a5c-9801-4437-886d-d79ee3677a27 req-a4510ea3-d402-4f2e-8eaf-166025002b50 service nova] Acquiring lock "refresh_cache-f465712f-f65a-4521-90ab-e9f5c5b6de5f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1610.121658] env[62508]: DEBUG nova.scheduler.client.report [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1610.143985] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776149, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.158531] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776147, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.386489] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776145, 'name': CopyVirtualDisk_Task} progress is 97%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.437950] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Releasing lock "refresh_cache-f465712f-f65a-4521-90ab-e9f5c5b6de5f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1610.438369] env[62508]: DEBUG nova.compute.manager [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Instance network_info: |[{"id": "934ffcfc-e193-4a25-9167-be27718f24af", "address": "fa:16:3e:5a:05:95", "network": {"id": "29c40f34-e678-48f1-94f4-d128bc6dfe71", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1416755499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce3b480c3c81499599aef114f92775cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap934ffcfc-e1", "ovs_interfaceid": "934ffcfc-e193-4a25-9167-be27718f24af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1610.438711] env[62508]: DEBUG oslo_concurrency.lockutils [req-d4b70a5c-9801-4437-886d-d79ee3677a27 req-a4510ea3-d402-4f2e-8eaf-166025002b50 service nova] Acquired lock "refresh_cache-f465712f-f65a-4521-90ab-e9f5c5b6de5f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1610.438916] env[62508]: DEBUG nova.network.neutron [req-d4b70a5c-9801-4437-886d-d79ee3677a27 req-a4510ea3-d402-4f2e-8eaf-166025002b50 service nova] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Refreshing network info cache for port 934ffcfc-e193-4a25-9167-be27718f24af {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1610.440289] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:05:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '934ffcfc-e193-4a25-9167-be27718f24af', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1610.449288] env[62508]: DEBUG oslo.service.loopingcall [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 
tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1610.450008] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1610.450319] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dc0614a8-9445-48eb-930b-d6f7824cee33 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.477193] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52bee857-4ff2-29f7-1026-cf3e5455cb02, 'name': SearchDatastore_Task, 'duration_secs': 0.089264} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.479203] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1610.479203] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] bee2cc61-b26c-4d2d-a2aa-ec79b8678e32/bee2cc61-b26c-4d2d-a2aa-ec79b8678e32.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1610.479480] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1610.479480] env[62508]: value = "task-1776150" [ 1610.479480] env[62508]: _type = "Task" [ 1610.479480] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.479682] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a9b3ef30-a474-41e9-a336-87f9f4a69ba4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.490342] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1610.490342] env[62508]: value = "task-1776151" [ 1610.490342] env[62508]: _type = "Task" [ 1610.490342] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.493837] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776150, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.637790] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.520s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1610.638680] env[62508]: DEBUG nova.compute.manager [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1610.641169] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776149, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.641650] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.278s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1610.643123] env[62508]: INFO nova.compute.claims [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1610.656892] env[62508]: DEBUG oslo_vmware.api [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776147, 'name': PowerOnVM_Task, 'duration_secs': 2.462985} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.657253] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1610.657530] env[62508]: INFO nova.compute.manager [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Took 9.85 seconds to spawn the instance on the hypervisor. 
[ 1610.657738] env[62508]: DEBUG nova.compute.manager [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1610.659285] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-734ec0ba-d528-4ef0-a9b6-9e1f28e30a8f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.890556] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776145, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.541203} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.890868] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d17b0134-9e41-4671-a49a-a34df9610594/d17b0134-9e41-4671-a49a-a34df9610594.vmdk to [datastore1] aedbd388-3ef7-410f-b0e3-5ea67ad56b65/aedbd388-3ef7-410f-b0e3-5ea67ad56b65.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1610.891809] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a8e9a3-10e1-4997-85a0-9a3d48d6114b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.920723] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] aedbd388-3ef7-410f-b0e3-5ea67ad56b65/aedbd388-3ef7-410f-b0e3-5ea67ad56b65.vmdk or device None with type streamOptimized {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1610.920914] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fce0735c-c385-4a84-9d6d-38274d2575c3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.942123] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1610.942123] env[62508]: value = "task-1776152" [ 1610.942123] env[62508]: _type = "Task" [ 1610.942123] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.952923] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776152, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.996601] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776150, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.004881] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776151, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.137752] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776149, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.145250] env[62508]: DEBUG nova.compute.utils [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1611.145250] env[62508]: DEBUG nova.compute.manager [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1611.145250] env[62508]: DEBUG nova.network.neutron [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1611.180655] env[62508]: INFO nova.compute.manager [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Took 19.69 seconds to build instance. [ 1611.225196] env[62508]: DEBUG nova.policy [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f3c96cc4a58a4321837c1ab8badc686a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0156fba01363470eaa9771d5f296f730', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1611.229356] env[62508]: DEBUG nova.network.neutron [req-d4b70a5c-9801-4437-886d-d79ee3677a27 req-a4510ea3-d402-4f2e-8eaf-166025002b50 service nova] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Updated VIF entry in instance network info cache for port 934ffcfc-e193-4a25-9167-be27718f24af. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1611.229945] env[62508]: DEBUG nova.network.neutron [req-d4b70a5c-9801-4437-886d-d79ee3677a27 req-a4510ea3-d402-4f2e-8eaf-166025002b50 service nova] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Updating instance_info_cache with network_info: [{"id": "934ffcfc-e193-4a25-9167-be27718f24af", "address": "fa:16:3e:5a:05:95", "network": {"id": "29c40f34-e678-48f1-94f4-d128bc6dfe71", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1416755499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce3b480c3c81499599aef114f92775cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap934ffcfc-e1", "ovs_interfaceid": "934ffcfc-e193-4a25-9167-be27718f24af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1611.463164] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776152, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.494548] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776150, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.505492] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776151, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.616641] env[62508]: DEBUG nova.network.neutron [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Successfully created port: 8834c92d-7c01-4079-9e5c-7fbe2b25d73e {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1611.639114] env[62508]: DEBUG oslo_vmware.api [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776149, 'name': PowerOnVM_Task, 'duration_secs': 1.550187} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.639419] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1611.639639] env[62508]: INFO nova.compute.manager [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Took 8.38 seconds to spawn the instance on the hypervisor. [ 1611.639815] env[62508]: DEBUG nova.compute.manager [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1611.640714] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a77abf-ca47-4456-ba0b-6f5a29dd8c10 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.649985] env[62508]: DEBUG nova.compute.manager [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1611.683624] env[62508]: DEBUG oslo_concurrency.lockutils [None req-948d18e2-ec9e-4e4b-b4d7-d643959cbd87 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "a72fd09e-a3be-486a-a03b-8c25b04d82d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.256s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1611.733973] env[62508]: DEBUG oslo_concurrency.lockutils [req-d4b70a5c-9801-4437-886d-d79ee3677a27 req-a4510ea3-d402-4f2e-8eaf-166025002b50 service nova] Releasing lock "refresh_cache-f465712f-f65a-4521-90ab-e9f5c5b6de5f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1611.957170] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776152, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.003171] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776150, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.011433] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776151, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.070871} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.012144] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] bee2cc61-b26c-4d2d-a2aa-ec79b8678e32/bee2cc61-b26c-4d2d-a2aa-ec79b8678e32.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1612.012446] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1612.012829] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-908f4c11-df26-447f-bc2e-cb6b365b0b91 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.019469] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1612.019469] env[62508]: value = "task-1776153" [ 1612.019469] env[62508]: _type = "Task" [ 1612.019469] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.032635] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776153, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.069891] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2018ec79-77d5-4fb8-aeec-4e10a6ad67b0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.078234] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72125f79-a3e0-4a6f-bb22-c2758da0119f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.110412] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a77dd88-a616-4f0d-bb2f-8ebe95736d3c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.118552] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb57f6b-fd49-438d-8573-c03f6ef88aa2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.134618] env[62508]: DEBUG nova.compute.provider_tree [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1612.162306] env[62508]: INFO nova.compute.manager [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Took 20.52 seconds to build instance. [ 1612.453966] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776152, 'name': ReconfigVM_Task, 'duration_secs': 1.130534} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.456027] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Reconfigured VM instance instance-0000002f to attach disk [datastore1] aedbd388-3ef7-410f-b0e3-5ea67ad56b65/aedbd388-3ef7-410f-b0e3-5ea67ad56b65.vmdk or device None with type streamOptimized {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1612.456027] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8e24c79a-8dbb-4648-b06a-8e3cb4fd195f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.461741] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1612.461741] env[62508]: value = "task-1776154" [ 1612.461741] env[62508]: _type = "Task" [ 1612.461741] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.469422] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776154, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.494848] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776150, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.528728] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776153, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.121104} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.528996] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1612.529784] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f25e42-a6d9-4975-903a-4fe136457f79 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.553433] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] bee2cc61-b26c-4d2d-a2aa-ec79b8678e32/bee2cc61-b26c-4d2d-a2aa-ec79b8678e32.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1612.553822] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99611e9e-31ab-4b24-9efc-b3dbfd80d073 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.573785] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1612.573785] env[62508]: value = "task-1776155" [ 1612.573785] env[62508]: _type = "Task" [ 1612.573785] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.581938] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776155, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.656827] env[62508]: ERROR nova.scheduler.client.report [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [req-abec7552-8593-497f-9804-d736147f42f6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-abec7552-8593-497f-9804-d736147f42f6"}]} [ 1612.666049] env[62508]: DEBUG oslo_concurrency.lockutils [None req-50e376b5-1492-4961-9117-0d5e0fb6a8e5 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "4cc6d0f4-413a-44e1-850f-da499f582d15" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.037s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1612.666049] env[62508]: DEBUG nova.compute.manager [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1612.672970] env[62508]: DEBUG nova.scheduler.client.report [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1612.688679] env[62508]: DEBUG nova.virt.hardware [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1612.688838] env[62508]: DEBUG nova.virt.hardware [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1612.689029] env[62508]: DEBUG nova.virt.hardware [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1612.689231] env[62508]: DEBUG nova.virt.hardware [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1612.689383] 
env[62508]: DEBUG nova.virt.hardware [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1612.689534] env[62508]: DEBUG nova.virt.hardware [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1612.689740] env[62508]: DEBUG nova.virt.hardware [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1612.689899] env[62508]: DEBUG nova.virt.hardware [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1612.690082] env[62508]: DEBUG nova.virt.hardware [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1612.690256] env[62508]: DEBUG nova.virt.hardware [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1612.690510] env[62508]: DEBUG nova.virt.hardware [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1612.691435] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1a9a15-c119-4d42-ab0d-7e3b302d35ee {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.695141] env[62508]: DEBUG nova.scheduler.client.report [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1612.695406] env[62508]: DEBUG 
nova.compute.provider_tree [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1612.703920] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91224678-d5c1-470d-b030-30bfb02cf71e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.708942] env[62508]: DEBUG nova.scheduler.client.report [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1612.730033] env[62508]: DEBUG nova.scheduler.client.report [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1612.852361] env[62508]: INFO nova.compute.manager [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Rebuilding instance [ 1612.891825] env[62508]: DEBUG nova.compute.manager [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1612.892728] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b908f37a-dfc0-4260-ae11-30905b348cf6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.972102] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776154, 'name': Rename_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.997571] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776150, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.084175] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776155, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.085950] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ade8ac-7d67-4e83-9c5e-2988a1c27c0c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.092952] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3e5a5c7-f0fb-4300-9685-52e02088d173 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.124070] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78929c23-9546-4e01-a4fc-7defd0daf2e7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.131539] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8e6e5d-e8d0-479f-9d31-1fd1a9091d2f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.145941] env[62508]: DEBUG nova.compute.provider_tree [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1613.406702] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1613.407024] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a289ab85-281e-4048-b783-f49532311d39 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.415303] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1613.415303] env[62508]: value = "task-1776156" [ 1613.415303] env[62508]: _type = "Task" [ 1613.415303] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.423325] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776156, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.471672] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776154, 'name': Rename_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.498376] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776150, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.584928] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776155, 'name': ReconfigVM_Task, 'duration_secs': 0.657009} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.585221] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Reconfigured VM instance instance-00000049 to attach disk [datastore1] bee2cc61-b26c-4d2d-a2aa-ec79b8678e32/bee2cc61-b26c-4d2d-a2aa-ec79b8678e32.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1613.585953] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4654d619-dd22-4c2d-8761-acc863376f64 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.591489] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1613.591489] env[62508]: value = "task-1776157" [ 1613.591489] env[62508]: _type = "Task" [ 1613.591489] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.599688] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776157, 'name': Rename_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.680800] env[62508]: DEBUG nova.scheduler.client.report [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 107 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1613.681123] env[62508]: DEBUG nova.compute.provider_tree [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 107 to 108 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1613.681336] env[62508]: DEBUG nova.compute.provider_tree [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1613.925982] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776156, 'name': PowerOffVM_Task, 'duration_secs': 0.139464} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.926293] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1613.926564] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1613.927405] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca354d0e-eb94-4e4b-b62d-f9a542ba7475 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.934345] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1613.934594] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ae37af7c-ea17-4e73-b1d2-1e2f5571b51d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.958753] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1613.959034] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1613.959256] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Deleting the datastore file [datastore1] 4cc6d0f4-413a-44e1-850f-da499f582d15 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1613.959529] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b0776039-48a7-4c10-afba-b543fe2feb55 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.968066] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1613.968066] env[62508]: value = "task-1776159" [ 1613.968066] env[62508]: _type = "Task" [ 1613.968066] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.975716] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776154, 'name': Rename_Task, 'duration_secs': 1.270762} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.976516] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1613.976839] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c590647a-7781-45e7-a53d-a5623a421204 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.982747] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776159, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.987283] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1613.987283] env[62508]: value = "task-1776160" [ 1613.987283] env[62508]: _type = "Task" [ 1613.987283] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.998579] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776160, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.002095] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776150, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.102133] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776157, 'name': Rename_Task, 'duration_secs': 0.183719} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.102467] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1614.102743] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3247e218-0271-4ac3-96ea-962589ba3467 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.108955] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1614.108955] env[62508]: value = "task-1776161" [ 1614.108955] env[62508]: _type = "Task" [ 1614.108955] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.117639] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776161, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.187022] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.545s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.187595] env[62508]: DEBUG nova.compute.manager [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1614.190399] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.549s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1614.190545] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.192619] env[62508]: DEBUG oslo_concurrency.lockutils [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.595s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1614.192858] env[62508]: DEBUG nova.objects.instance [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lazy-loading 'resources' on Instance uuid 14c911d6-44c2-4c56-a027-3d25a1e58bcc {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1614.213561] env[62508]: INFO nova.scheduler.client.report [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleted allocations for instance 80a9e17e-4095-498c-80c8-200bfb4f3d1f [ 1614.481604] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776159, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168442} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.482296] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1614.482296] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1614.482296] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1614.502600] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776160, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.506567] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776150, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.620737] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776161, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.696084] env[62508]: DEBUG nova.compute.utils [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1614.701009] env[62508]: DEBUG nova.compute.manager [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1614.701208] env[62508]: DEBUG nova.network.neutron [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1614.723410] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1cd957c8-4a35-4554-aa17-5b700c8dc746 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "80a9e17e-4095-498c-80c8-200bfb4f3d1f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.827s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.746460] env[62508]: DEBUG nova.policy [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4334bea6d1be4d17b3e9c3ccb77087e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40b20dc13e8843d98d80664cd88d018a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1615.001877] env[62508]: DEBUG oslo_vmware.api [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776160, 'name': PowerOnVM_Task, 'duration_secs': 0.668146} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.002231] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1615.008073] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776150, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.009133] env[62508]: DEBUG nova.network.neutron [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Successfully created port: daf9c849-a4e9-4de3-ba16-819ff682a207 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1615.012048] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1d7e92-64b0-4ce1-b6cd-d65b58a6457a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.022170] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc23e361-d6bc-4483-b698-4bb938a37b9c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.057658] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6ae865-dcba-44b6-b686-29407aed0cae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.067322] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b864674a-b0b5-49de-a2ec-314966a12b1d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.082933] env[62508]: DEBUG nova.compute.provider_tree [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1615.117067] env[62508]: DEBUG nova.compute.manager [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1615.118254] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a84b2b-07bf-4495-bd67-71dfe15e3a2e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.128557] env[62508]: DEBUG oslo_vmware.api [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776161, 'name': PowerOnVM_Task, 'duration_secs': 0.542626} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.130830] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1615.131106] env[62508]: INFO nova.compute.manager [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Took 9.48 seconds to spawn the instance on the hypervisor. [ 1615.131397] env[62508]: DEBUG nova.compute.manager [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1615.136301] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0760660-3471-42b2-98ef-d664fa5a277f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.201822] env[62508]: DEBUG nova.compute.manager [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1615.505670] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776150, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.521046] env[62508]: DEBUG nova.virt.hardware [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1615.521337] env[62508]: DEBUG nova.virt.hardware [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1615.521494] env[62508]: DEBUG nova.virt.hardware [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1615.521708] env[62508]: DEBUG nova.virt.hardware [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1615.521888] env[62508]: DEBUG nova.virt.hardware [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1615.522063] env[62508]: DEBUG nova.virt.hardware [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1615.522291] env[62508]: DEBUG nova.virt.hardware [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1615.522467] env[62508]: DEBUG nova.virt.hardware [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1615.522656] env[62508]: DEBUG nova.virt.hardware [None 
req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1615.522852] env[62508]: DEBUG nova.virt.hardware [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1615.523064] env[62508]: DEBUG nova.virt.hardware [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1615.523972] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc41eeed-f4ab-4969-83fa-ce91b5ca4aec {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.532472] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57808e70-61c8-476e-9d1c-c2723d75cc49 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.546124] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Instance VIF info [] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1615.552035] env[62508]: DEBUG oslo.service.loopingcall [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1615.552295] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1615.553103] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b62c32e3-99e0-4806-9f27-4e6ef901fe83 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.570216] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1615.570216] env[62508]: value = "task-1776162" [ 1615.570216] env[62508]: _type = "Task" [ 1615.570216] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.578140] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776162, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.590302] env[62508]: DEBUG nova.scheduler.client.report [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1615.655648] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c76157fa-cbc0-4eed-857b-8b89d36886e5 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 26.943s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.658998] env[62508]: INFO nova.compute.manager [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Took 23.64 seconds to build instance. [ 1616.007295] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776150, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.079806] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776162, 'name': CreateVM_Task, 'duration_secs': 0.396336} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.080091] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1616.080502] env[62508]: DEBUG oslo_concurrency.lockutils [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1616.080673] env[62508]: DEBUG oslo_concurrency.lockutils [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1616.081014] env[62508]: DEBUG oslo_concurrency.lockutils [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1616.081304] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed04ab18-2855-44af-ac5c-cf27dec02904 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.086158] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1616.086158] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520a0018-277d-9375-c820-f505b9a5b127" [ 1616.086158] env[62508]: _type = "Task" [ 1616.086158] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.094105] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520a0018-277d-9375-c820-f505b9a5b127, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.095953] env[62508]: DEBUG oslo_concurrency.lockutils [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.903s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.126981] env[62508]: INFO nova.scheduler.client.report [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Deleted allocations for instance 14c911d6-44c2-4c56-a027-3d25a1e58bcc [ 1616.161412] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fb740cd3-99db-46a8-88cd-4e90c5000764 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "bee2cc61-b26c-4d2d-a2aa-ec79b8678e32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.159s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.212819] env[62508]: DEBUG nova.compute.manager [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1616.235636] env[62508]: DEBUG nova.virt.hardware [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1616.235900] env[62508]: DEBUG nova.virt.hardware [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1616.236070] env[62508]: DEBUG nova.virt.hardware [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1616.236261] env[62508]: DEBUG nova.virt.hardware [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Flavor pref 0:0:0 
{{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1616.236413] env[62508]: DEBUG nova.virt.hardware [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1616.236583] env[62508]: DEBUG nova.virt.hardware [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1616.236799] env[62508]: DEBUG nova.virt.hardware [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1616.236960] env[62508]: DEBUG nova.virt.hardware [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1616.237143] env[62508]: DEBUG nova.virt.hardware [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1616.237339] env[62508]: DEBUG nova.virt.hardware [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1616.237537] env[62508]: DEBUG nova.virt.hardware [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1616.238434] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d688e62d-e91f-40ee-8b8c-71e7b49acf14 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.247293] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a3c728-21e3-491e-91ff-1b61ecd15767 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.507682] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776150, 'name': CreateVM_Task, 'duration_secs': 5.846805} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.507994] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1616.508602] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1616.598376] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520a0018-277d-9375-c820-f505b9a5b127, 'name': SearchDatastore_Task, 'duration_secs': 0.01627} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.598696] env[62508]: DEBUG oslo_concurrency.lockutils [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1616.598974] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1616.599235] env[62508]: DEBUG oslo_concurrency.lockutils [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1616.599398] env[62508]: DEBUG oslo_concurrency.lockutils [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1616.599585] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1616.599875] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1616.600227] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1616.600467] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0cc44c5d-ce78-4e65-846e-9c67dd67022f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.602469] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fc3a376-91fb-403a-8a32-c6d67498966b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.607721] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1616.607721] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52760f6f-33b3-a783-696a-21145acb6a90" [ 1616.607721] env[62508]: _type = "Task" [ 1616.607721] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.612118] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1616.612631] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1616.613461] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe07938e-f6e3-4df3-824c-160a318b2dbc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.618544] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52760f6f-33b3-a783-696a-21145acb6a90, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.621608] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1616.621608] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52dcc467-4366-5b75-71e5-61de4ec9d2dd" [ 1616.621608] env[62508]: _type = "Task" [ 1616.621608] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.630079] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52dcc467-4366-5b75-71e5-61de4ec9d2dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.638828] env[62508]: DEBUG oslo_concurrency.lockutils [None req-93de380a-6940-4700-aa05-7887629bfdd8 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "14c911d6-44c2-4c56-a027-3d25a1e58bcc" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 13.110s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.765185] env[62508]: DEBUG oslo_vmware.rw_handles [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523c3bd3-9bed-d386-1c41-823c403677a3/disk-0.vmdk. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1616.766212] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fbd6c3a-1083-428b-a0e7-e2e1940371e2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.772536] env[62508]: DEBUG oslo_vmware.rw_handles [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523c3bd3-9bed-d386-1c41-823c403677a3/disk-0.vmdk is in state: ready. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1616.772744] env[62508]: ERROR oslo_vmware.rw_handles [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523c3bd3-9bed-d386-1c41-823c403677a3/disk-0.vmdk due to incomplete transfer. [ 1616.772915] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0437a038-9138-47ef-8719-b8f95ce2d2f4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.781606] env[62508]: DEBUG oslo_vmware.rw_handles [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523c3bd3-9bed-d386-1c41-823c403677a3/disk-0.vmdk.
{{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1616.781829] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Uploaded image 57b290e8-da3a-4e9f-9233-d8f772b973bf to the Glance image server {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1616.784261] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Destroying the VM {{(pid=62508) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1616.784805] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b28b71c6-f827-44ab-93f4-dc18ddd92466 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.791058] env[62508]: DEBUG oslo_vmware.api [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1616.791058] env[62508]: value = "task-1776163" [ 1616.791058] env[62508]: _type = "Task" [ 1616.791058] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.801671] env[62508]: DEBUG oslo_vmware.api [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776163, 'name': Destroy_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.952113] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4331b0bb-18ce-45a1-ba85-778981ae9e06 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.959637] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-42abbac3-897d-4d56-b2f1-57fa87e0bd73 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Suspending the VM {{(pid=62508) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1616.959944] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-cc72dd63-a8dd-49f0-96ea-2df174b784e7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.966685] env[62508]: DEBUG oslo_vmware.api [None req-42abbac3-897d-4d56-b2f1-57fa87e0bd73 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1616.966685] env[62508]: value = "task-1776164" [ 1616.966685] env[62508]: _type = "Task" [ 1616.966685] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.975765] env[62508]: DEBUG oslo_vmware.api [None req-42abbac3-897d-4d56-b2f1-57fa87e0bd73 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776164, 'name': SuspendVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.122260] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52760f6f-33b3-a783-696a-21145acb6a90, 'name': SearchDatastore_Task, 'duration_secs': 0.01522} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.125942] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1617.126658] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1617.126954] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1617.133422] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52dcc467-4366-5b75-71e5-61de4ec9d2dd, 'name': SearchDatastore_Task, 'duration_secs': 0.015697} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.134616] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ba7731b-ecba-4074-a70f-054ca19fc92d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.142140] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1617.142140] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5275999b-9961-09d9-d833-4ff0b2a6a541" [ 1617.142140] env[62508]: _type = "Task" [ 1617.142140] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.150903] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5275999b-9961-09d9-d833-4ff0b2a6a541, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.177242] env[62508]: DEBUG nova.network.neutron [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Successfully updated port: 3f15fe5b-6f5a-4889-8452-9da44feeaab9 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1617.302442] env[62508]: DEBUG oslo_vmware.api [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776163, 'name': Destroy_Task, 'duration_secs': 0.410052} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.302759] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Destroyed the VM [ 1617.303045] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Deleting Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1617.303351] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a8a4bd8b-afba-48cd-9d52-e46d40fd0013 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.311403] env[62508]: DEBUG oslo_vmware.api [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1617.311403] env[62508]: value = "task-1776165" [ 1617.311403] env[62508]: _type = "Task" [ 1617.311403] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.321389] env[62508]: DEBUG oslo_vmware.api [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776165, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.403749] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "68d64a06-f752-459c-a152-157893e79bfd" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1617.404104] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "68d64a06-f752-459c-a152-157893e79bfd" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.404408] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "68d64a06-f752-459c-a152-157893e79bfd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1617.404670] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "68d64a06-f752-459c-a152-157893e79bfd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.404882] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "68d64a06-f752-459c-a152-157893e79bfd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.407254] env[62508]: INFO nova.compute.manager [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Terminating instance [ 1617.409364] env[62508]: DEBUG nova.compute.manager [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Start destroying the instance on the hypervisor.
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1617.409576] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1617.410484] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0202b559-87f4-4239-af15-71c2b0675012 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.418691] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1617.418986] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c8764ce5-1004-4863-8232-3d9cb452f746 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.424917] env[62508]: DEBUG oslo_vmware.api [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1617.424917] env[62508]: value = "task-1776166" [ 1617.424917] env[62508]: _type = "Task" [ 1617.424917] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.433787] env[62508]: DEBUG oslo_vmware.api [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776166, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.478700] env[62508]: DEBUG oslo_vmware.api [None req-42abbac3-897d-4d56-b2f1-57fa87e0bd73 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776164, 'name': SuspendVM_Task} progress is 41%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.651732] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5275999b-9961-09d9-d833-4ff0b2a6a541, 'name': SearchDatastore_Task, 'duration_secs': 0.078082} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.652095] env[62508]: DEBUG oslo_concurrency.lockutils [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1617.652301] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 4cc6d0f4-413a-44e1-850f-da499f582d15/4cc6d0f4-413a-44e1-850f-da499f582d15.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1617.652625] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1617.652820] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1617.653060] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7cb626fe-81f8-4dbf-b60a-21d5a04a9011 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.655174] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf9dc202-64b7-43bf-8f7f-a6ae5fd90d37 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.661880] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1617.661880] env[62508]: value = "task-1776167" [ 1617.661880] env[62508]: _type = "Task" [ 1617.661880] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.666403] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1617.666577] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1617.667548] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d40e7da-f5fb-4d7f-b000-956908f61b2b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.673157] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776167, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.676216] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1617.676216] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526c8a53-2765-7e42-a3d8-ad8098aa9faa" [ 1617.676216] env[62508]: _type = "Task" [ 1617.676216] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.679590] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "refresh_cache-70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1617.679838] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "refresh_cache-70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1617.679932] env[62508]: DEBUG nova.network.neutron [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1617.688112] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526c8a53-2765-7e42-a3d8-ad8098aa9faa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.707386] env[62508]: DEBUG nova.network.neutron [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Successfully updated port: daf9c849-a4e9-4de3-ba16-819ff682a207 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1617.770545] env[62508]: DEBUG nova.compute.manager [req-12c4a88a-a370-4479-b3f5-5555ba629b5b req-2623a5a2-26c3-49b6-9e02-9ca4415f2b65 service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Received event network-vif-plugged-3f15fe5b-6f5a-4889-8452-9da44feeaab9 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1617.770734] env[62508]: DEBUG oslo_concurrency.lockutils [req-12c4a88a-a370-4479-b3f5-5555ba629b5b req-2623a5a2-26c3-49b6-9e02-9ca4415f2b65 service nova] Acquiring lock "70c8de27-4696-4005-bbec-e7a33e56311b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1617.770977] env[62508]: DEBUG oslo_concurrency.lockutils [req-12c4a88a-a370-4479-b3f5-5555ba629b5b req-2623a5a2-26c3-49b6-9e02-9ca4415f2b65 service nova] Lock "70c8de27-4696-4005-bbec-e7a33e56311b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.772164] env[62508]: DEBUG oslo_concurrency.lockutils [req-12c4a88a-a370-4479-b3f5-5555ba629b5b req-2623a5a2-26c3-49b6-9e02-9ca4415f2b65 service nova] Lock "70c8de27-4696-4005-bbec-e7a33e56311b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.772164] env[62508]: DEBUG nova.compute.manager [req-12c4a88a-a370-4479-b3f5-5555ba629b5b req-2623a5a2-26c3-49b6-9e02-9ca4415f2b65 service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] No waiting events found dispatching network-vif-plugged-3f15fe5b-6f5a-4889-8452-9da44feeaab9 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1617.772164] env[62508]: WARNING nova.compute.manager [req-12c4a88a-a370-4479-b3f5-5555ba629b5b req-2623a5a2-26c3-49b6-9e02-9ca4415f2b65 service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Received unexpected event network-vif-plugged-3f15fe5b-6f5a-4889-8452-9da44feeaab9 for instance with vm_state active and task_state None. [ 1617.801836] env[62508]: DEBUG nova.network.neutron [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Successfully updated port: 8834c92d-7c01-4079-9e5c-7fbe2b25d73e {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1617.824448] env[62508]: DEBUG oslo_vmware.api [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776165, 'name': RemoveSnapshot_Task} progress is 0%.
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.931419] env[62508]: DEBUG nova.compute.manager [req-b002f065-51e4-4fed-a382-5b73bd96d0e8 req-25f6e4af-763c-4c8e-b7fa-446dfbc4adc8 service nova] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Received event network-vif-plugged-daf9c849-a4e9-4de3-ba16-819ff682a207 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1617.931656] env[62508]: DEBUG oslo_concurrency.lockutils [req-b002f065-51e4-4fed-a382-5b73bd96d0e8 req-25f6e4af-763c-4c8e-b7fa-446dfbc4adc8 service nova] Acquiring lock "a617fe8b-c70e-4988-a6ce-437ccc5261c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1617.931865] env[62508]: DEBUG oslo_concurrency.lockutils [req-b002f065-51e4-4fed-a382-5b73bd96d0e8 req-25f6e4af-763c-4c8e-b7fa-446dfbc4adc8 service nova] Lock "a617fe8b-c70e-4988-a6ce-437ccc5261c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.932052] env[62508]: DEBUG oslo_concurrency.lockutils [req-b002f065-51e4-4fed-a382-5b73bd96d0e8 req-25f6e4af-763c-4c8e-b7fa-446dfbc4adc8 service nova] Lock "a617fe8b-c70e-4988-a6ce-437ccc5261c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.932300] env[62508]: DEBUG nova.compute.manager [req-b002f065-51e4-4fed-a382-5b73bd96d0e8 req-25f6e4af-763c-4c8e-b7fa-446dfbc4adc8 service nova] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] No waiting events found dispatching network-vif-plugged-daf9c849-a4e9-4de3-ba16-819ff682a207 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1617.932483] env[62508]: WARNING nova.compute.manager [req-b002f065-51e4-4fed-a382-5b73bd96d0e8 req-25f6e4af-763c-4c8e-b7fa-446dfbc4adc8 service nova] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Received unexpected event network-vif-plugged-daf9c849-a4e9-4de3-ba16-819ff682a207 for instance with vm_state building and task_state spawning. [ 1617.939206] env[62508]: DEBUG oslo_vmware.api [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776166, 'name': PowerOffVM_Task, 'duration_secs': 0.225632} completed successfully.
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.939206] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1617.939330] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1617.939538] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8a9807f-b24e-4e3e-a43f-1647e97eb2f1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.977914] env[62508]: DEBUG oslo_vmware.api [None req-42abbac3-897d-4d56-b2f1-57fa87e0bd73 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776164, 'name': SuspendVM_Task, 'duration_secs': 0.983049} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.978208] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-42abbac3-897d-4d56-b2f1-57fa87e0bd73 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Suspended the VM {{(pid=62508) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1617.978391] env[62508]: DEBUG nova.compute.manager [None req-42abbac3-897d-4d56-b2f1-57fa87e0bd73 tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1617.979212] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b7a510-e519-41b9-b173-920232f6f168 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.070870] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1618.071132] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1618.071304] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Deleting the datastore file [datastore1] 68d64a06-f752-459c-a152-157893e79bfd {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1618.071569] env[62508]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-133bd2b8-456d-4775-b18f-fee6c542090f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.077401] env[62508]: DEBUG oslo_vmware.api [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1618.077401] env[62508]: value = "task-1776169" [ 1618.077401] env[62508]: _type = "Task" [ 1618.077401] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.085505] env[62508]: DEBUG oslo_vmware.api [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776169, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.171703] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776167, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.188163] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526c8a53-2765-7e42-a3d8-ad8098aa9faa, 'name': SearchDatastore_Task, 'duration_secs': 0.034754} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.189412] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2636dee0-e757-4305-8430-eaa1fdef3795 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.196029] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1618.196029] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f2c3ab-c1f6-3a48-65a5-d3c9a4c821bd" [ 1618.196029] env[62508]: _type = "Task" [ 1618.196029] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.206606] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f2c3ab-c1f6-3a48-65a5-d3c9a4c821bd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.210270] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Acquiring lock "refresh_cache-a617fe8b-c70e-4988-a6ce-437ccc5261c6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1618.210396] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Acquired lock "refresh_cache-a617fe8b-c70e-4988-a6ce-437ccc5261c6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1618.210549] env[62508]: DEBUG nova.network.neutron [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1618.234179] env[62508]: WARNING nova.network.neutron [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] 8e80b270-1a6d-45fe-9a8b-355e686378f1 already exists in list: networks containing: ['8e80b270-1a6d-45fe-9a8b-355e686378f1']. ignoring it [ 1618.307241] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "refresh_cache-2e32ca83-8506-4588-bd33-4eadb7d2d30a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1618.307241] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "refresh_cache-2e32ca83-8506-4588-bd33-4eadb7d2d30a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1618.307241] env[62508]: DEBUG nova.network.neutron [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1618.329328] env[62508]: DEBUG oslo_vmware.api [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776165, 'name': RemoveSnapshot_Task, 'duration_secs': 0.660754} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.329990] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Deleted Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1618.331864] env[62508]: INFO nova.compute.manager [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Took 14.98 seconds to snapshot the instance on the hypervisor. [ 1618.394297] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "091a11ef-d6c7-4f04-90a6-273da14ce88b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.394586] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "091a11ef-d6c7-4f04-90a6-273da14ce88b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.589341] env[62508]: DEBUG oslo_vmware.api [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776169, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.674512] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776167, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.922204} completed successfully.
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.675063] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 4cc6d0f4-413a-44e1-850f-da499f582d15/4cc6d0f4-413a-44e1-850f-da499f582d15.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1618.675561] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1618.675820] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ffa2198-cf42-4df3-9f33-6af6540810ae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.683625] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1618.683625] env[62508]: value = "task-1776170" [ 1618.683625] env[62508]: _type = "Task" [ 1618.683625] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.689055] env[62508]: DEBUG nova.network.neutron [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Updating instance_info_cache with network_info: [{"id": "2623e6a9-42b1-4f98-9d68-a5230cdc3d79", "address": "fa:16:3e:9d:f1:1e", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2623e6a9-42", "ovs_interfaceid": "2623e6a9-42b1-4f98-9d68-a5230cdc3d79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3f15fe5b-6f5a-4889-8452-9da44feeaab9", "address": "fa:16:3e:95:85:9f", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", 
"dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f15fe5b-6f", "ovs_interfaceid": "3f15fe5b-6f5a-4889-8452-9da44feeaab9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1618.695481] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776170, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.708027] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f2c3ab-c1f6-3a48-65a5-d3c9a4c821bd, 'name': SearchDatastore_Task, 'duration_secs': 0.080428} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.708027] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1618.708027] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] f465712f-f65a-4521-90ab-e9f5c5b6de5f/f465712f-f65a-4521-90ab-e9f5c5b6de5f.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1618.708027] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b3c6301-22b0-4e12-99a7-201c6f34f355 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.715444] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1618.715444] env[62508]: value = "task-1776171" [ 1618.715444] env[62508]: _type = "Task" [ 1618.715444] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.724472] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776171, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.761811] env[62508]: DEBUG nova.network.neutron [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1618.856856] env[62508]: DEBUG nova.network.neutron [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1618.876753] env[62508]: DEBUG nova.compute.manager [None req-36877842-0556-45f9-9f64-eba83b124863 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Found 1 images (rotation: 2) {{(pid=62508) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1618.896854] env[62508]: DEBUG nova.compute.manager [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1619.015179] env[62508]: DEBUG nova.network.neutron [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Updating instance_info_cache with network_info: [{"id": "daf9c849-a4e9-4de3-ba16-819ff682a207", "address": "fa:16:3e:5d:08:d7", "network": {"id": "32e685b7-e95a-427f-a633-14460b8c7a84", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-649749590-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40b20dc13e8843d98d80664cd88d018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdaf9c849-a4", "ovs_interfaceid": "daf9c849-a4e9-4de3-ba16-819ff682a207", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1619.087735] env[62508]: DEBUG oslo_vmware.api [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776169, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.806454} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.088711] env[62508]: DEBUG nova.network.neutron [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Updating instance_info_cache with network_info: [{"id": "8834c92d-7c01-4079-9e5c-7fbe2b25d73e", "address": "fa:16:3e:85:8b:d0", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8834c92d-7c", "ovs_interfaceid": "8834c92d-7c01-4079-9e5c-7fbe2b25d73e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1619.089859] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1619.090065] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1619.090250] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1619.090423] env[62508]: INFO nova.compute.manager [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1619.090669] env[62508]: DEBUG oslo.service.loopingcall [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1619.091068] env[62508]: DEBUG nova.compute.manager [-] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1619.091162] env[62508]: DEBUG nova.network.neutron [-] [instance: 68d64a06-f752-459c-a152-157893e79bfd] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1619.165593] env[62508]: INFO nova.compute.manager [None req-5fad760d-9b64-4278-8ebc-b7d1b9d5792e tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Resuming [ 1619.166204] env[62508]: DEBUG nova.objects.instance [None req-5fad760d-9b64-4278-8ebc-b7d1b9d5792e tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lazy-loading 'flavor' on Instance uuid aedbd388-3ef7-410f-b0e3-5ea67ad56b65 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1619.193810] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776170, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.215206} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.194137] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1619.194936] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-697bf0c2-68d8-43fe-bce0-d8127633af18 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.197979] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "refresh_cache-70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1619.198593] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1619.198736] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1619.199533] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468da934-87d3-459b-9cc9-77dd4161e9f6 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.222040] env[62508]: DEBUG nova.virt.hardware [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1619.222040] env[62508]: DEBUG nova.virt.hardware [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1619.222040] env[62508]: DEBUG nova.virt.hardware [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1619.222040] env[62508]: DEBUG nova.virt.hardware [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1619.222040] env[62508]: DEBUG nova.virt.hardware [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1619.222040] env[62508]: DEBUG nova.virt.hardware [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1619.222040] env[62508]: DEBUG nova.virt.hardware [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1619.222040] env[62508]: DEBUG nova.virt.hardware [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1619.222918] env[62508]: DEBUG nova.virt.hardware [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Got 1 possible topologies 
{{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1619.222918] env[62508]: DEBUG nova.virt.hardware [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1619.222918] env[62508]: DEBUG nova.virt.hardware [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1619.229398] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Reconfiguring VM to attach interface {{(pid=62508) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1619.240060] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db8e252e-0739-4375-9129-db4fbbb732c5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.262024] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 4cc6d0f4-413a-44e1-850f-da499f582d15/4cc6d0f4-413a-44e1-850f-da499f582d15.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1619.262769] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66fd521f-bb23-4ed4-8b32-1d783cbe02a1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.284174] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776171, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.286683] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1619.286683] env[62508]: value = "task-1776173" [ 1619.286683] env[62508]: _type = "Task" [ 1619.286683] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.287130] env[62508]: DEBUG oslo_vmware.api [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1619.287130] env[62508]: value = "task-1776172" [ 1619.287130] env[62508]: _type = "Task" [ 1619.287130] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.299552] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776173, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.302187] env[62508]: DEBUG oslo_vmware.api [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776172, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.426367] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.426367] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1619.427453] env[62508]: INFO nova.compute.claims [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1619.517471] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Releasing lock "refresh_cache-a617fe8b-c70e-4988-a6ce-437ccc5261c6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1619.518155] env[62508]: DEBUG nova.compute.manager [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Instance network_info: |[{"id": "daf9c849-a4e9-4de3-ba16-819ff682a207", "address": "fa:16:3e:5d:08:d7", "network": {"id": "32e685b7-e95a-427f-a633-14460b8c7a84", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-649749590-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40b20dc13e8843d98d80664cd88d018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapdaf9c849-a4", "ovs_interfaceid": "daf9c849-a4e9-4de3-ba16-819ff682a207", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1619.518361] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:08:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b4d548e7-d762-406a-bb2d-dc7168a8ca67', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'daf9c849-a4e9-4de3-ba16-819ff682a207', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1619.526943] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Creating folder: Project (40b20dc13e8843d98d80664cd88d018a). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1619.527568] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-10299ea7-c744-4340-b30a-26219740c53e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.538109] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Created folder: Project (40b20dc13e8843d98d80664cd88d018a) in parent group-v368536. [ 1619.538316] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Creating folder: Instances. Parent ref: group-v368746. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1619.538563] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eacaacd9-824e-499c-9c71-e53286ed7644 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.548042] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Created folder: Instances in parent group-v368746. [ 1619.548404] env[62508]: DEBUG oslo.service.loopingcall [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1619.548458] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1619.548646] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-692414f8-baa8-45a4-8e7d-b62bed752c47 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.567829] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1619.567829] env[62508]: value = "task-1776176" [ 1619.567829] env[62508]: _type = "Task" [ 1619.567829] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.575669] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776176, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.592396] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "refresh_cache-2e32ca83-8506-4588-bd33-4eadb7d2d30a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1619.592723] env[62508]: DEBUG nova.compute.manager [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Instance network_info: |[{"id": "8834c92d-7c01-4079-9e5c-7fbe2b25d73e", "address": "fa:16:3e:85:8b:d0", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8834c92d-7c", "ovs_interfaceid": "8834c92d-7c01-4079-9e5c-7fbe2b25d73e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1619.593139] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:8b:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '8834c92d-7c01-4079-9e5c-7fbe2b25d73e', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1619.600765] env[62508]: DEBUG oslo.service.loopingcall [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1619.600978] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1619.601239] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e90a1f7-4604-427f-a228-1206a119cca2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.620831] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1619.620831] env[62508]: value = "task-1776177" [ 1619.620831] env[62508]: _type = "Task" [ 1619.620831] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.629597] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776177, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.731236] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776171, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.813705} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.731566] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] f465712f-f65a-4521-90ab-e9f5c5b6de5f/f465712f-f65a-4521-90ab-e9f5c5b6de5f.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1619.731697] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1619.731951] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8f1b5457-0520-453d-bf63-d5c7771fe893 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.738245] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1619.738245] env[62508]: value = "task-1776178" [ 1619.738245] env[62508]: _type = "Task" [ 1619.738245] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.746881] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776178, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.801541] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776173, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.805352] env[62508]: DEBUG oslo_vmware.api [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776172, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.949521] env[62508]: DEBUG nova.network.neutron [-] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1619.997993] env[62508]: DEBUG nova.compute.manager [req-43a9fade-ea2a-46bd-9011-b62d5a7f730e req-f700baef-7e88-4e15-9cc8-1ef5df852497 service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Received event network-changed-3f15fe5b-6f5a-4889-8452-9da44feeaab9 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1619.998117] env[62508]: DEBUG nova.compute.manager [req-43a9fade-ea2a-46bd-9011-b62d5a7f730e req-f700baef-7e88-4e15-9cc8-1ef5df852497 service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Refreshing instance network info cache due to event network-changed-3f15fe5b-6f5a-4889-8452-9da44feeaab9. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1619.998409] env[62508]: DEBUG oslo_concurrency.lockutils [req-43a9fade-ea2a-46bd-9011-b62d5a7f730e req-f700baef-7e88-4e15-9cc8-1ef5df852497 service nova] Acquiring lock "refresh_cache-70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1619.998565] env[62508]: DEBUG oslo_concurrency.lockutils [req-43a9fade-ea2a-46bd-9011-b62d5a7f730e req-f700baef-7e88-4e15-9cc8-1ef5df852497 service nova] Acquired lock "refresh_cache-70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1619.998733] env[62508]: DEBUG nova.network.neutron [req-43a9fade-ea2a-46bd-9011-b62d5a7f730e req-f700baef-7e88-4e15-9cc8-1ef5df852497 service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Refreshing network info cache for port 3f15fe5b-6f5a-4889-8452-9da44feeaab9 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1620.079210] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776176, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.114048] env[62508]: DEBUG nova.compute.manager [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Received event network-vif-plugged-8834c92d-7c01-4079-9e5c-7fbe2b25d73e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1620.114288] env[62508]: DEBUG oslo_concurrency.lockutils [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] Acquiring lock "2e32ca83-8506-4588-bd33-4eadb7d2d30a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1620.114505] env[62508]: DEBUG oslo_concurrency.lockutils [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] Lock "2e32ca83-8506-4588-bd33-4eadb7d2d30a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.114701] env[62508]: DEBUG oslo_concurrency.lockutils [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] Lock "2e32ca83-8506-4588-bd33-4eadb7d2d30a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.114888] env[62508]: DEBUG nova.compute.manager [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] No waiting events found dispatching network-vif-plugged-8834c92d-7c01-4079-9e5c-7fbe2b25d73e {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1620.115116] env[62508]: WARNING nova.compute.manager [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Received unexpected event network-vif-plugged-8834c92d-7c01-4079-9e5c-7fbe2b25d73e for instance with vm_state building and task_state spawning. [ 1620.115242] env[62508]: DEBUG nova.compute.manager [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Received event network-changed-daf9c849-a4e9-4de3-ba16-819ff682a207 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1620.115426] env[62508]: DEBUG nova.compute.manager [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Refreshing instance network info cache due to event network-changed-daf9c849-a4e9-4de3-ba16-819ff682a207. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1620.115643] env[62508]: DEBUG oslo_concurrency.lockutils [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] Acquiring lock "refresh_cache-a617fe8b-c70e-4988-a6ce-437ccc5261c6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.115798] env[62508]: DEBUG oslo_concurrency.lockutils [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] Acquired lock "refresh_cache-a617fe8b-c70e-4988-a6ce-437ccc5261c6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1620.115955] env[62508]: DEBUG nova.network.neutron [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Refreshing network info cache for port daf9c849-a4e9-4de3-ba16-819ff682a207 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1620.130985] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776177, 'name': CreateVM_Task, 'duration_secs': 0.373764} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.131175] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1620.131834] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.131992] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1620.132327] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1620.132588] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09a58076-b64e-43b2-8437-e90ddb48cfc0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.137758] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1620.137758] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a7ba2a-2481-13c2-c852-903b038e2cab" [ 1620.137758] env[62508]: _type = "Task" [ 1620.137758] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.146893] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a7ba2a-2481-13c2-c852-903b038e2cab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.175179] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5fad760d-9b64-4278-8ebc-b7d1b9d5792e tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.175340] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5fad760d-9b64-4278-8ebc-b7d1b9d5792e tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquired lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1620.175551] env[62508]: DEBUG nova.network.neutron [None req-5fad760d-9b64-4278-8ebc-b7d1b9d5792e tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1620.182605] env[62508]: DEBUG nova.compute.manager [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1620.184032] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b736061c-4429-4c0d-b158-d50f5d0955ac {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.250221] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776178, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.129577} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.250551] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1620.251373] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3c5ebb-03cf-4d77-9935-fbdc83804d1d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.275156] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] f465712f-f65a-4521-90ab-e9f5c5b6de5f/f465712f-f65a-4521-90ab-e9f5c5b6de5f.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1620.275156] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78aace22-f019-45e1-b6f4-84a56a2cad56 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.301164] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776173, 'name': ReconfigVM_Task, 'duration_secs': 0.92716} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.305095] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 4cc6d0f4-413a-44e1-850f-da499f582d15/4cc6d0f4-413a-44e1-850f-da499f582d15.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1620.305818] env[62508]: DEBUG oslo_vmware.api [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776172, 'name': ReconfigVM_Task, 'duration_secs': 0.844396} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.306594] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1620.306594] env[62508]: value = "task-1776179" [ 1620.306594] env[62508]: _type = "Task" [ 1620.306594] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.306594] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e73b99f6-f5ad-4677-9922-51df9f719271 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.308795] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1620.308795] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Reconfigured VM to attach interface {{(pid=62508) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1620.321411] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776179, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.322778] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1620.322778] env[62508]: value = "task-1776180" [ 1620.322778] env[62508]: _type = "Task" [ 1620.322778] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.333097] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776180, 'name': Rename_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.452646] env[62508]: INFO nova.compute.manager [-] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Took 1.36 seconds to deallocate network for instance. [ 1620.580264] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776176, 'name': CreateVM_Task, 'duration_secs': 0.754356} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.580465] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1620.581150] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.649632] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a7ba2a-2481-13c2-c852-903b038e2cab, 'name': SearchDatastore_Task, 'duration_secs': 0.027281} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.652717] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1620.653068] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1620.653319] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.653470] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1620.653684] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1620.654407] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1620.654672] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1620.654893] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa6328e5-d44a-415e-9cb6-f694ce2afc3f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.656715] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd09949a-bae2-4573-8af9-c0b8c5574c05 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.661719] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Waiting for the task: (returnval){ [ 1620.661719] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f305d2-4e82-4889-057c-cc24497fb05d" [ 1620.661719] env[62508]: _type = "Task" [ 1620.661719] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.669562] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1620.669736] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1620.670906] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18d8f886-afa2-46a8-9ae1-a26f53e7965f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.680215] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f305d2-4e82-4889-057c-cc24497fb05d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.685273] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1620.685273] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ffd126-57ab-b70b-761b-4f0601666579" [ 1620.685273] env[62508]: _type = "Task" [ 1620.685273] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.692689] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ffd126-57ab-b70b-761b-4f0601666579, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.696916] env[62508]: INFO nova.compute.manager [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] instance snapshotting [ 1620.697548] env[62508]: DEBUG nova.objects.instance [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lazy-loading 'flavor' on Instance uuid e478855d-e9c7-4abc-8e22-a4b2eb0c7310 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1620.777664] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4354a7bb-dc44-4f9c-a350-1f406bdd2ae9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.787327] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b40a9a4b-f8b0-4f53-b8fc-8ded83b37252 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.820676] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25841d5f-3405-46fd-a524-be5f180ed568 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "interface-70c8de27-4696-4005-bbec-e7a33e56311b-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 12.523s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.825257] env[62508]: DEBUG nova.network.neutron [req-43a9fade-ea2a-46bd-9011-b62d5a7f730e req-f700baef-7e88-4e15-9cc8-1ef5df852497 service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Updated VIF entry in instance network info cache for port 3f15fe5b-6f5a-4889-8452-9da44feeaab9. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1620.825715] env[62508]: DEBUG nova.network.neutron [req-43a9fade-ea2a-46bd-9011-b62d5a7f730e req-f700baef-7e88-4e15-9cc8-1ef5df852497 service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Updating instance_info_cache with network_info: [{"id": "2623e6a9-42b1-4f98-9d68-a5230cdc3d79", "address": "fa:16:3e:9d:f1:1e", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2623e6a9-42", "ovs_interfaceid": "2623e6a9-42b1-4f98-9d68-a5230cdc3d79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3f15fe5b-6f5a-4889-8452-9da44feeaab9", "address": "fa:16:3e:95:85:9f", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f15fe5b-6f", "ovs_interfaceid": "3f15fe5b-6f5a-4889-8452-9da44feeaab9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1620.831120] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d94bed-045b-4bdd-b2e6-f888afa78f87 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.846398] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776179, 'name': ReconfigVM_Task, 'duration_secs': 0.330581} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.851788] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Reconfigured VM instance instance-0000004a to attach disk [datastore1] f465712f-f65a-4521-90ab-e9f5c5b6de5f/f465712f-f65a-4521-90ab-e9f5c5b6de5f.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1620.851939] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776180, 'name': Rename_Task, 'duration_secs': 0.36363} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.853082] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1cbc40f1-2046-4f22-81e6-d40637cee54f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.853979] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1620.855188] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2147cdf5-1d9e-4226-92c2-91630d23bf49 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.859688] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-286ff5e1-a06b-456c-8e6e-8876b390816c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.872161] env[62508]: DEBUG nova.compute.provider_tree [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1620.883273] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1620.883273] env[62508]: value = "task-1776181" [ 1620.883273] env[62508]: _type = "Task" [ 1620.883273] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.883273] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1620.883273] env[62508]: value = "task-1776182" [ 1620.883273] env[62508]: _type = "Task" [ 1620.883273] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.892139] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776181, 'name': Rename_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.895443] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776182, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.957121] env[62508]: DEBUG nova.network.neutron [None req-5fad760d-9b64-4278-8ebc-b7d1b9d5792e tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Updating instance_info_cache with network_info: [{"id": "bd3f776a-1eed-4e8d-b7f3-d958db372a2f", "address": "fa:16:3e:c0:6a:f0", "network": {"id": "c8fc62e6-749b-4f96-8d05-8664390ef76f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1342046586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9b1180071bc4cc2a419daac2f51e3f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d829efb7-e98e-4b67-bd03-b0888287dbfd", "external-id": "nsx-vlan-transportzone-128", "segmentation_id": 128, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd3f776a-1e", "ovs_interfaceid": "bd3f776a-1eed-4e8d-b7f3-d958db372a2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1620.959681] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1620.979438] env[62508]: DEBUG nova.network.neutron [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Updated VIF entry in instance network info cache for port daf9c849-a4e9-4de3-ba16-819ff682a207. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1620.979783] env[62508]: DEBUG nova.network.neutron [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Updating instance_info_cache with network_info: [{"id": "daf9c849-a4e9-4de3-ba16-819ff682a207", "address": "fa:16:3e:5d:08:d7", "network": {"id": "32e685b7-e95a-427f-a633-14460b8c7a84", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-649749590-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40b20dc13e8843d98d80664cd88d018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdaf9c849-a4", "ovs_interfaceid": "daf9c849-a4e9-4de3-ba16-819ff682a207", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1621.178278] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f305d2-4e82-4889-057c-cc24497fb05d, 'name': SearchDatastore_Task, 'duration_secs': 0.013558} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.178715] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.179065] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1621.179393] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1621.196541] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ffd126-57ab-b70b-761b-4f0601666579, 'name': SearchDatastore_Task, 'duration_secs': 0.010785} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.197468] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66dde70d-c698-4e6c-bed3-bc8b3cdfa0af {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.204810] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1621.204810] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524f18cd-04ac-e1ab-bdfe-bd54388d364b" [ 1621.204810] env[62508]: _type = "Task" [ 1621.204810] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.209731] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870caf83-d406-4df3-8e19-3af01c1de1f1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.218092] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524f18cd-04ac-e1ab-bdfe-bd54388d364b, 'name': SearchDatastore_Task, 'duration_secs': 0.01174} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.231072] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.231356] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 2e32ca83-8506-4588-bd33-4eadb7d2d30a/2e32ca83-8506-4588-bd33-4eadb7d2d30a.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1621.231768] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1621.231966] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1621.232192] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-408cf2a7-76bd-4a21-b38f-d4bd824bd068 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.234655] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297ff52d-c175-4072-832e-8e458f098a7a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.237343] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-efd61484-b3e7-4ef5-86a1-70c0ddca8b79 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.248122] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1621.248122] env[62508]: value = "task-1776183" [ 1621.248122] env[62508]: _type = "Task" [ 1621.248122] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.248393] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1621.248520] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1621.249506] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59a3ee3d-09a6-4d49-9bbc-641bea21c241 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.257547] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Waiting for the task: (returnval){ [ 1621.257547] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f05a19-7d20-89a6-1c20-d6d90b2c1adf" [ 1621.257547] env[62508]: _type = "Task" [ 1621.257547] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.261411] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776183, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.269638] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f05a19-7d20-89a6-1c20-d6d90b2c1adf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.328370] env[62508]: DEBUG oslo_concurrency.lockutils [req-43a9fade-ea2a-46bd-9011-b62d5a7f730e req-f700baef-7e88-4e15-9cc8-1ef5df852497 service nova] Releasing lock "refresh_cache-70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.375811] env[62508]: DEBUG nova.scheduler.client.report [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1621.394748] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776182, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.395026] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776181, 'name': Rename_Task, 'duration_secs': 0.279904} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.396048] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1621.396310] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d20ad7b-dc17-46c1-bff4-03f4030fc17a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.402297] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1621.402297] env[62508]: value = "task-1776184" [ 1621.402297] env[62508]: _type = "Task" [ 1621.402297] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.410964] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776184, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.459682] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5fad760d-9b64-4278-8ebc-b7d1b9d5792e tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Releasing lock "refresh_cache-aedbd388-3ef7-410f-b0e3-5ea67ad56b65" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.460689] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-651ba515-9a96-40bb-9698-9a68796b746a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.468192] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5fad760d-9b64-4278-8ebc-b7d1b9d5792e tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Resuming the VM {{(pid=62508) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1621.468450] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64f3cf01-2da7-4063-890f-c7c1205402c1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.474312] env[62508]: DEBUG oslo_vmware.api [None req-5fad760d-9b64-4278-8ebc-b7d1b9d5792e tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1621.474312] env[62508]: value = "task-1776185" [ 1621.474312] env[62508]: _type = "Task" [ 1621.474312] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.482605] env[62508]: DEBUG oslo_vmware.api [None req-5fad760d-9b64-4278-8ebc-b7d1b9d5792e tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776185, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.486240] env[62508]: DEBUG oslo_concurrency.lockutils [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] Releasing lock "refresh_cache-a617fe8b-c70e-4988-a6ce-437ccc5261c6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.486548] env[62508]: DEBUG nova.compute.manager [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Received event network-changed-8834c92d-7c01-4079-9e5c-7fbe2b25d73e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1621.486751] env[62508]: DEBUG nova.compute.manager [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Refreshing instance network info cache due to event network-changed-8834c92d-7c01-4079-9e5c-7fbe2b25d73e. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1621.486947] env[62508]: DEBUG oslo_concurrency.lockutils [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] Acquiring lock "refresh_cache-2e32ca83-8506-4588-bd33-4eadb7d2d30a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1621.487136] env[62508]: DEBUG oslo_concurrency.lockutils [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] Acquired lock "refresh_cache-2e32ca83-8506-4588-bd33-4eadb7d2d30a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1621.487312] env[62508]: DEBUG nova.network.neutron [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Refreshing network info cache for port 8834c92d-7c01-4079-9e5c-7fbe2b25d73e {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1621.751983] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Creating Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1621.751983] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d8867df2-dd8e-4f66-895f-f5a364cb0ec5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.769095] env[62508]: DEBUG oslo_vmware.api [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1621.769095] env[62508]: value = "task-1776186" [ 1621.769095] env[62508]: _type = "Task" [ 1621.769095] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.769402] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776183, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.777591] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f05a19-7d20-89a6-1c20-d6d90b2c1adf, 'name': SearchDatastore_Task, 'duration_secs': 0.012771} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.778863] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b243f87-ec29-4fd6-9873-2b9f7b78634b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.784614] env[62508]: DEBUG oslo_vmware.api [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776186, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.788762] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Waiting for the task: (returnval){ [ 1621.788762] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525c878b-7f2a-8e64-3d77-edcf89002c19" [ 1621.788762] env[62508]: _type = "Task" [ 1621.788762] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.799325] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525c878b-7f2a-8e64-3d77-edcf89002c19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.881518] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.455s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1621.882152] env[62508]: DEBUG nova.compute.manager [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1621.885039] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.925s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1621.885302] env[62508]: DEBUG nova.objects.instance [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lazy-loading 'resources' on Instance uuid 68d64a06-f752-459c-a152-157893e79bfd {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1621.896825] env[62508]: DEBUG oslo_vmware.api [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776182, 'name': PowerOnVM_Task, 'duration_secs': 0.722575} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.897897] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1621.898137] env[62508]: DEBUG nova.compute.manager [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1621.898933] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7972f72-081b-4a07-a032-c14071b02e79 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.915972] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776184, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.985565] env[62508]: DEBUG oslo_vmware.api [None req-5fad760d-9b64-4278-8ebc-b7d1b9d5792e tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776185, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.268149] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776183, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.585214} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.268467] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 2e32ca83-8506-4588-bd33-4eadb7d2d30a/2e32ca83-8506-4588-bd33-4eadb7d2d30a.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1622.268693] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1622.270188] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-da180764-8b30-4576-bfab-b6faa0e0f362 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.283875] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1622.283875] env[62508]: value = "task-1776187" [ 1622.283875] env[62508]: _type = "Task" [ 1622.283875] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.286879] env[62508]: DEBUG oslo_vmware.api [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776186, 'name': CreateSnapshot_Task, 'duration_secs': 0.50368} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.290954] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Created Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1622.291994] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d75324-610f-44d5-b4cb-1608c1dbf6d8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.312286] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776187, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.312571] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525c878b-7f2a-8e64-3d77-edcf89002c19, 'name': SearchDatastore_Task, 'duration_secs': 0.026124} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.313493] env[62508]: DEBUG nova.network.neutron [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Updated VIF entry in instance network info cache for port 8834c92d-7c01-4079-9e5c-7fbe2b25d73e. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1622.313895] env[62508]: DEBUG nova.network.neutron [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Updating instance_info_cache with network_info: [{"id": "8834c92d-7c01-4079-9e5c-7fbe2b25d73e", "address": "fa:16:3e:85:8b:d0", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8834c92d-7c", "ovs_interfaceid": "8834c92d-7c01-4079-9e5c-7fbe2b25d73e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1622.315362] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1622.315741] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a617fe8b-c70e-4988-a6ce-437ccc5261c6/a617fe8b-c70e-4988-a6ce-437ccc5261c6.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1622.316523] env[62508]: DEBUG oslo_vmware.service [-] Invoking 
VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3ecf3bc-e5fd-4a59-bd7d-cccec5d77820 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.326022] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Waiting for the task: (returnval){ [ 1622.326022] env[62508]: value = "task-1776188" [ 1622.326022] env[62508]: _type = "Task" [ 1622.326022] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.336148] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': task-1776188, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.396861] env[62508]: DEBUG nova.compute.utils [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1622.398944] env[62508]: DEBUG nova.compute.manager [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1622.398944] env[62508]: DEBUG nova.network.neutron [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1622.421264] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776184, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.430069] env[62508]: DEBUG oslo_concurrency.lockutils [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1622.443991] env[62508]: DEBUG nova.policy [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '596ff35abb3949e9b3d3d9b80e6eae69', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '15a9d6b8eb4e44a7a3d7fa4abe0cd5bb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1622.488900] env[62508]: DEBUG oslo_vmware.api [None req-5fad760d-9b64-4278-8ebc-b7d1b9d5792e tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776185, 'name': PowerOnVM_Task} progress is 93%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.759200] env[62508]: DEBUG nova.network.neutron [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Successfully created port: 72c0e2c3-36d1-4cd8-91e2-487be99a6348 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1622.764703] env[62508]: DEBUG oslo_concurrency.lockutils [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "interface-70c8de27-4696-4005-bbec-e7a33e56311b-3f15fe5b-6f5a-4889-8452-9da44feeaab9" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1622.764990] env[62508]: DEBUG oslo_concurrency.lockutils [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "interface-70c8de27-4696-4005-bbec-e7a33e56311b-3f15fe5b-6f5a-4889-8452-9da44feeaab9" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.795871] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776187, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082833} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.797113] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1622.797924] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-625bff5b-397b-40ee-b978-2bf0d83d635f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.801155] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ea0c5b-469d-4fe9-a7e9-ce454d6c5763 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.816924] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eada13f2-78e2-4c39-88c1-7e288bc5675d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.826964] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Creating linked-clone VM from snapshot {{(pid=62508) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1622.827438] env[62508]: DEBUG oslo_concurrency.lockutils [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] Releasing lock "refresh_cache-2e32ca83-8506-4588-bd33-4eadb7d2d30a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1622.827668] env[62508]: DEBUG nova.compute.manager [req-90de3828-6237-45ea-b5e7-068c9a24c27a req-24c704a9-bab1-45c7-9d1c-cf879a604bdf service nova] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Received event network-vif-deleted-8407dcaf-0ebb-4d5f-ab85-80cb879b38bb {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1622.836520] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 2e32ca83-8506-4588-bd33-4eadb7d2d30a/2e32ca83-8506-4588-bd33-4eadb7d2d30a.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1622.836891] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7cbfcff1-c00b-409b-bbbc-0c1bbe1f2e08 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.839696] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5179ccd5-7fe9-403f-ab56-ab79f64e52b3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.884474] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f4fcbe8c-025d-49a4-ae32-e4c645d61f9d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.890091] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': task-1776188, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.892423] env[62508]: DEBUG oslo_vmware.api [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1622.892423] env[62508]: value = "task-1776189" [ 1622.892423] env[62508]: _type = "Task" [ 1622.892423] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.892685] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1622.892685] env[62508]: value = "task-1776190" [ 1622.892685] env[62508]: _type = "Task" [ 1622.892685] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.901847] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-922e688b-7df9-4c94-8847-2b85df1c4f5a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.913381] env[62508]: DEBUG nova.compute.manager [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1622.916501] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776190, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.916807] env[62508]: DEBUG oslo_vmware.api [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776189, 'name': CloneVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.931851] env[62508]: DEBUG nova.compute.provider_tree [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1622.936784] env[62508]: DEBUG oslo_vmware.api [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776184, 'name': PowerOnVM_Task, 'duration_secs': 1.039985} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.937390] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1622.937873] env[62508]: INFO nova.compute.manager [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Took 14.79 seconds to spawn the instance on the hypervisor. [ 1622.937873] env[62508]: DEBUG nova.compute.manager [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1622.939079] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63830f5-994d-43e0-8064-6d35d65818b4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.986939] env[62508]: DEBUG oslo_vmware.api [None req-5fad760d-9b64-4278-8ebc-b7d1b9d5792e tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776185, 'name': PowerOnVM_Task, 'duration_secs': 1.103457} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.988231] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5fad760d-9b64-4278-8ebc-b7d1b9d5792e tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Resumed the VM {{(pid=62508) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1622.989519] env[62508]: DEBUG nova.compute.manager [None req-5fad760d-9b64-4278-8ebc-b7d1b9d5792e tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1622.989643] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c81788-130b-452d-a923-bd423e11000c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.140457] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquiring lock "4cc6d0f4-413a-44e1-850f-da499f582d15" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1623.140842] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "4cc6d0f4-413a-44e1-850f-da499f582d15" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1623.141122] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquiring lock "4cc6d0f4-413a-44e1-850f-da499f582d15-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1623.141397] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "4cc6d0f4-413a-44e1-850f-da499f582d15-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1623.141696] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "4cc6d0f4-413a-44e1-850f-da499f582d15-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.144657] env[62508]: INFO nova.compute.manager [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Terminating instance [ 1623.146650] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquiring lock "refresh_cache-4cc6d0f4-413a-44e1-850f-da499f582d15" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1623.146746] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquired lock "refresh_cache-4cc6d0f4-413a-44e1-850f-da499f582d15" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1623.146852] env[62508]: DEBUG nova.network.neutron [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1623.267548] env[62508]: DEBUG oslo_concurrency.lockutils [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1623.267744] env[62508]: DEBUG oslo_concurrency.lockutils [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1623.269086] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a074933-1219-4e75-8ee1-382daad1b74a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.289204] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ffcb2b-e349-4c63-b384-33bcc84fc7ce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.317074] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Reconfiguring VM to detach interface {{(pid=62508) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1623.317441] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56d0bf2a-11e8-419d-b814-01809953de0f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.342864] env[62508]: DEBUG oslo_vmware.api [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1623.342864] env[62508]: value = "task-1776191" [ 1623.342864] env[62508]: _type = "Task" [ 1623.342864] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.350610] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': task-1776188, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.356091] env[62508]: DEBUG oslo_vmware.api [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776191, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.407372] env[62508]: DEBUG oslo_vmware.api [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776189, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.411473] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776190, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.439024] env[62508]: DEBUG nova.scheduler.client.report [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1623.460077] env[62508]: INFO nova.compute.manager [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Took 31.00 seconds to build instance. [ 1623.674904] env[62508]: DEBUG nova.network.neutron [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1623.733170] env[62508]: DEBUG nova.network.neutron [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1623.848155] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': task-1776188, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.175417} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.851139] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a617fe8b-c70e-4988-a6ce-437ccc5261c6/a617fe8b-c70e-4988-a6ce-437ccc5261c6.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1623.851416] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1623.851714] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d760cd0e-0f1b-4221-8b03-17fe9f6932c8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.858799] env[62508]: DEBUG oslo_vmware.api [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776191, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.860154] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Waiting for the task: (returnval){ [ 1623.860154] env[62508]: value = "task-1776192" [ 1623.860154] env[62508]: _type = "Task" [ 1623.860154] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.868526] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': task-1776192, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.906689] env[62508]: DEBUG oslo_vmware.api [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776189, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.909930] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776190, 'name': ReconfigVM_Task, 'duration_secs': 0.64561} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.910256] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 2e32ca83-8506-4588-bd33-4eadb7d2d30a/2e32ca83-8506-4588-bd33-4eadb7d2d30a.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1623.910956] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8670bc14-fd08-4913-8834-1e5330d7d1d9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.917369] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1623.917369] env[62508]: value = "task-1776193" [ 1623.917369] env[62508]: _type = "Task" [ 1623.917369] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.929803] env[62508]: DEBUG nova.compute.manager [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1623.932047] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776193, 'name': Rename_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.947171] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.060s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.947904] env[62508]: DEBUG oslo_concurrency.lockutils [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.518s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1623.948113] env[62508]: DEBUG nova.objects.instance [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62508) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1623.965116] env[62508]: DEBUG nova.virt.hardware [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1623.965419] env[62508]: DEBUG nova.virt.hardware [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1623.965626] env[62508]: DEBUG nova.virt.hardware [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1623.965999] env[62508]: DEBUG nova.virt.hardware [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1623.966173] env[62508]: DEBUG nova.virt.hardware [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1623.966550] env[62508]: DEBUG nova.virt.hardware [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1623.966688] env[62508]: DEBUG nova.virt.hardware [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1623.966741] env[62508]: DEBUG nova.virt.hardware [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1623.967917] env[62508]: DEBUG nova.virt.hardware [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1623.967917] env[62508]: DEBUG nova.virt.hardware [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1623.967917] env[62508]: DEBUG nova.virt.hardware [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1623.967917] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a850d0a4-b176-4410-8f65-f93cad8fcbfe tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "f465712f-f65a-4521-90ab-e9f5c5b6de5f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.528s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.969331] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b2e7d2-72d1-4963-a85f-4938b6c16519 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.973007] env[62508]: INFO nova.scheduler.client.report [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Deleted allocations for instance 68d64a06-f752-459c-a152-157893e79bfd [ 1623.980023] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d527dda6-c9ea-4068-bb62-0ee87077dbad {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.238027] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 
tempest-ServerShowV247Test-1954042013-project-member] Releasing lock "refresh_cache-4cc6d0f4-413a-44e1-850f-da499f582d15" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1624.238027] env[62508]: DEBUG nova.compute.manager [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1624.238027] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1624.238027] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d7bce5-0824-4ac1-86a2-f42ed5f27158 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.249061] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1624.249061] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11a6bf33-89ea-4bdc-a95b-23aaa7ba1163 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.255356] env[62508]: DEBUG oslo_vmware.api [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1624.255356] env[62508]: value = "task-1776194" [ 1624.255356] env[62508]: _type = "Task" [ 1624.255356] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.267149] env[62508]: DEBUG oslo_vmware.api [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776194, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.270128] env[62508]: DEBUG nova.compute.manager [req-a91c9ffd-5d7e-48bb-b5cb-e21c2d8fdbb2 req-9c44623d-0ee0-49a9-9409-f9838efc44a5 service nova] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Received event network-vif-plugged-72c0e2c3-36d1-4cd8-91e2-487be99a6348 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1624.270651] env[62508]: DEBUG oslo_concurrency.lockutils [req-a91c9ffd-5d7e-48bb-b5cb-e21c2d8fdbb2 req-9c44623d-0ee0-49a9-9409-f9838efc44a5 service nova] Acquiring lock "091a11ef-d6c7-4f04-90a6-273da14ce88b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.270995] env[62508]: DEBUG oslo_concurrency.lockutils [req-a91c9ffd-5d7e-48bb-b5cb-e21c2d8fdbb2 req-9c44623d-0ee0-49a9-9409-f9838efc44a5 service nova] Lock "091a11ef-d6c7-4f04-90a6-273da14ce88b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.271295] env[62508]: DEBUG oslo_concurrency.lockutils [req-a91c9ffd-5d7e-48bb-b5cb-e21c2d8fdbb2 req-9c44623d-0ee0-49a9-9409-f9838efc44a5 service nova] Lock "091a11ef-d6c7-4f04-90a6-273da14ce88b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.271763] env[62508]: DEBUG nova.compute.manager [req-a91c9ffd-5d7e-48bb-b5cb-e21c2d8fdbb2 req-9c44623d-0ee0-49a9-9409-f9838efc44a5 service nova] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] No waiting events found dispatching network-vif-plugged-72c0e2c3-36d1-4cd8-91e2-487be99a6348 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1624.271763] env[62508]: WARNING nova.compute.manager [req-a91c9ffd-5d7e-48bb-b5cb-e21c2d8fdbb2 req-9c44623d-0ee0-49a9-9409-f9838efc44a5 service nova] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Received unexpected event network-vif-plugged-72c0e2c3-36d1-4cd8-91e2-487be99a6348 for instance with vm_state building and task_state spawning. [ 1624.359907] env[62508]: DEBUG oslo_vmware.api [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776191, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.370730] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': task-1776192, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105289} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.372071] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1624.373029] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c503173-ed4c-41fe-828e-1a448f3bef01 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.407128] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] a617fe8b-c70e-4988-a6ce-437ccc5261c6/a617fe8b-c70e-4988-a6ce-437ccc5261c6.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1624.412135] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bef61f3e-553e-4a70-b906-2439eabffacf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.439611] env[62508]: DEBUG oslo_vmware.api [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776189, 'name': CloneVM_Task, 'duration_secs': 1.341814} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.441083] env[62508]: DEBUG nova.network.neutron [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Successfully updated port: 72c0e2c3-36d1-4cd8-91e2-487be99a6348 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1624.444629] env[62508]: INFO nova.virt.vmwareapi.vmops [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Created linked-clone VM from snapshot [ 1624.445135] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Waiting for the task: (returnval){ [ 1624.445135] env[62508]: value = "task-1776195" [ 1624.445135] env[62508]: _type = "Task" [ 1624.445135] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.447291] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e59d6fa-887b-47c4-a142-6e651be7da5d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.458497] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776193, 'name': Rename_Task, 'duration_secs': 0.198248} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.465033] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1624.470641] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-833ef0a2-d3bc-4f13-99d9-2b8309b0c51b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.474330] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Uploading image 57743140-2ac4-4e84-acc9-58f9b671609a {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1624.479945] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': task-1776195, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.487845] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1624.487845] env[62508]: value = "task-1776196" [ 1624.487845] env[62508]: _type = "Task" [ 1624.487845] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.488344] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d5010420-6fa1-4b2a-bc9a-fe1e27140998 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "68d64a06-f752-459c-a152-157893e79bfd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.084s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.503681] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776196, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.509325] env[62508]: DEBUG oslo_vmware.rw_handles [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1624.509325] env[62508]: value = "vm-368751" [ 1624.509325] env[62508]: _type = "VirtualMachine" [ 1624.509325] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1624.509673] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1cfdaa8f-2369-4c6a-a0ce-cc4e2831cff8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.518354] env[62508]: DEBUG oslo_vmware.rw_handles [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lease: (returnval){ [ 1624.518354] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52792eca-9f05-6b68-7708-ef2afe5e15ec" [ 1624.518354] env[62508]: _type = "HttpNfcLease" [ 1624.518354] env[62508]: } obtained for exporting VM: (result){ [ 1624.518354] env[62508]: value = "vm-368751" [ 1624.518354] env[62508]: _type = "VirtualMachine" [ 1624.518354] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1624.518725] env[62508]: DEBUG oslo_vmware.api [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the lease: (returnval){ [ 1624.518725] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52792eca-9f05-6b68-7708-ef2afe5e15ec" [ 1624.518725] env[62508]: _type = "HttpNfcLease" [ 1624.518725] env[62508]: } to be ready. {{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1624.527877] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1624.527877] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52792eca-9f05-6b68-7708-ef2afe5e15ec" [ 1624.527877] env[62508]: _type = "HttpNfcLease" [ 1624.527877] env[62508]: } is initializing. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1624.765895] env[62508]: DEBUG oslo_vmware.api [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776194, 'name': PowerOffVM_Task, 'duration_secs': 0.417187} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.766203] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1624.766374] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1624.766638] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9a5aae9e-02fb-4d49-a4fb-bfca811a7874 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.795730] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1624.795999] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1624.796253] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Deleting the datastore file [datastore1] 4cc6d0f4-413a-44e1-850f-da499f582d15 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1624.796551] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-495eeb41-e6f4-4a34-9fd7-deaf5c6dd999 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.802827] env[62508]: DEBUG oslo_vmware.api [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1624.802827] env[62508]: value = "task-1776199" [ 1624.802827] env[62508]: _type = "Task" [ 1624.802827] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.812036] env[62508]: DEBUG oslo_vmware.api [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776199, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.858577] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "9283494f-d8e2-4077-9e4d-57ee4786c3c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.858858] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "9283494f-d8e2-4077-9e4d-57ee4786c3c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.863563] env[62508]: DEBUG oslo_vmware.api [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776191, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.947142] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "refresh_cache-091a11ef-d6c7-4f04-90a6-273da14ce88b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.947142] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquired lock "refresh_cache-091a11ef-d6c7-4f04-90a6-273da14ce88b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.947305] env[62508]: DEBUG nova.network.neutron [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1624.962404] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': task-1776195, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.970231] env[62508]: DEBUG oslo_concurrency.lockutils [None req-42470adf-ce93-482c-af28-e95fefaed79b tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.022s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.999886] env[62508]: DEBUG oslo_vmware.api [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776196, 'name': PowerOnVM_Task, 'duration_secs': 0.512622} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.000229] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1625.000367] env[62508]: INFO nova.compute.manager [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Took 12.33 seconds to spawn the instance on the hypervisor. [ 1625.000551] env[62508]: DEBUG nova.compute.manager [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1625.001365] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af44605-f598-44f3-b9ce-5e5eb5c3069a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.027257] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1625.027257] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52792eca-9f05-6b68-7708-ef2afe5e15ec" [ 1625.027257] env[62508]: _type = "HttpNfcLease" [ 1625.027257] env[62508]: } is ready. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1625.027569] env[62508]: DEBUG oslo_vmware.rw_handles [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1625.027569] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52792eca-9f05-6b68-7708-ef2afe5e15ec" [ 1625.027569] env[62508]: _type = "HttpNfcLease" [ 1625.027569] env[62508]: }. 
{{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1625.028351] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab82211-2e51-49eb-a5c3-a3e7e38ee27b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.035817] env[62508]: DEBUG oslo_vmware.rw_handles [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5296ec8c-7a5b-0f30-ea07-054f7a337aa0/disk-0.vmdk from lease info. {{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1625.035969] env[62508]: DEBUG oslo_vmware.rw_handles [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5296ec8c-7a5b-0f30-ea07-054f7a337aa0/disk-0.vmdk for reading. {{(pid=62508) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1625.163051] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-968662d5-7265-4165-89d4-9c658ab97ff0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.312937] env[62508]: DEBUG oslo_vmware.api [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776199, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.362080] env[62508]: DEBUG oslo_vmware.api [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776191, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.364604] env[62508]: DEBUG nova.compute.manager [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1625.463079] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': task-1776195, 'name': ReconfigVM_Task, 'duration_secs': 0.599947} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.463502] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Reconfigured VM instance instance-0000004c to attach disk [datastore1] a617fe8b-c70e-4988-a6ce-437ccc5261c6/a617fe8b-c70e-4988-a6ce-437ccc5261c6.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1625.464565] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9dad695b-095a-455e-9f20-686ab1ff90f6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.473243] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Waiting for the task: (returnval){ [ 1625.473243] env[62508]: value = "task-1776200" [ 1625.473243] env[62508]: _type = "Task" [ 1625.473243] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.486834] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': task-1776200, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.508537] env[62508]: DEBUG nova.network.neutron [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1625.521208] env[62508]: INFO nova.compute.manager [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Took 28.18 seconds to build instance. 
[ 1625.725158] env[62508]: DEBUG nova.network.neutron [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Updating instance_info_cache with network_info: [{"id": "72c0e2c3-36d1-4cd8-91e2-487be99a6348", "address": "fa:16:3e:aa:5e:a2", "network": {"id": "ca54620c-2118-4248-ac67-90f8579e33aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-578420006-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15a9d6b8eb4e44a7a3d7fa4abe0cd5bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72c0e2c3-36", "ovs_interfaceid": "72c0e2c3-36d1-4cd8-91e2-487be99a6348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.815303] env[62508]: DEBUG oslo_vmware.api [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776199, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.639439} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.816038] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1625.816038] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1625.816180] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1625.816376] env[62508]: INFO nova.compute.manager [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Took 1.58 seconds to destroy the instance on the hypervisor. 
[ 1625.816649] env[62508]: DEBUG oslo.service.loopingcall [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1625.817112] env[62508]: DEBUG nova.compute.manager [-] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1625.817211] env[62508]: DEBUG nova.network.neutron [-] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1625.837371] env[62508]: DEBUG nova.network.neutron [-] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1625.867300] env[62508]: DEBUG oslo_vmware.api [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776191, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.894228] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1625.894496] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1625.897034] env[62508]: INFO nova.compute.claims [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1625.984666] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': task-1776200, 'name': Rename_Task, 'duration_secs': 0.169082} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.985091] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1625.985378] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5437aba-932f-4cb9-8029-ab4914036d5e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.994919] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Waiting for the task: (returnval){ [ 1625.994919] env[62508]: value = "task-1776201" [ 1625.994919] env[62508]: _type = "Task" [ 1625.994919] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.003439] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': task-1776201, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.023682] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c79a358-0104-4356-9d53-b1203802daaa tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "2e32ca83-8506-4588-bd33-4eadb7d2d30a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.693s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.230667] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Releasing lock "refresh_cache-091a11ef-d6c7-4f04-90a6-273da14ce88b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1626.230667] env[62508]: DEBUG nova.compute.manager [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Instance network_info: |[{"id": "72c0e2c3-36d1-4cd8-91e2-487be99a6348", "address": "fa:16:3e:aa:5e:a2", "network": {"id": "ca54620c-2118-4248-ac67-90f8579e33aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-578420006-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15a9d6b8eb4e44a7a3d7fa4abe0cd5bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 
133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72c0e2c3-36", "ovs_interfaceid": "72c0e2c3-36d1-4cd8-91e2-487be99a6348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1626.230667] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:5e:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9d6abf71-e893-4dec-9a05-0fe7d6c0624e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '72c0e2c3-36d1-4cd8-91e2-487be99a6348', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1626.238960] env[62508]: DEBUG oslo.service.loopingcall [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1626.239307] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1626.239647] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4f31490-ec14-4721-aed0-b56f616f4f2b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.260950] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1626.260950] env[62508]: value = "task-1776202" [ 1626.260950] env[62508]: _type = "Task" [ 1626.260950] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.268834] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776202, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.296901] env[62508]: DEBUG nova.compute.manager [req-3db1a0c6-8a40-4426-b013-06c6f8a7934e req-6cbfe581-de82-4416-ae4c-e6f91bbc3897 service nova] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Received event network-changed-72c0e2c3-36d1-4cd8-91e2-487be99a6348 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1626.297160] env[62508]: DEBUG nova.compute.manager [req-3db1a0c6-8a40-4426-b013-06c6f8a7934e req-6cbfe581-de82-4416-ae4c-e6f91bbc3897 service nova] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Refreshing instance network info cache due to event network-changed-72c0e2c3-36d1-4cd8-91e2-487be99a6348. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1626.297486] env[62508]: DEBUG oslo_concurrency.lockutils [req-3db1a0c6-8a40-4426-b013-06c6f8a7934e req-6cbfe581-de82-4416-ae4c-e6f91bbc3897 service nova] Acquiring lock "refresh_cache-091a11ef-d6c7-4f04-90a6-273da14ce88b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.297675] env[62508]: DEBUG oslo_concurrency.lockutils [req-3db1a0c6-8a40-4426-b013-06c6f8a7934e req-6cbfe581-de82-4416-ae4c-e6f91bbc3897 service nova] Acquired lock "refresh_cache-091a11ef-d6c7-4f04-90a6-273da14ce88b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1626.297923] env[62508]: DEBUG nova.network.neutron [req-3db1a0c6-8a40-4426-b013-06c6f8a7934e req-6cbfe581-de82-4416-ae4c-e6f91bbc3897 service nova] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Refreshing network info cache for port 72c0e2c3-36d1-4cd8-91e2-487be99a6348 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1626.340214] env[62508]: DEBUG nova.network.neutron [-] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1626.363018] env[62508]: DEBUG oslo_vmware.api [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776191, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.507408] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': task-1776201, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.771032] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776202, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.846972] env[62508]: INFO nova.compute.manager [-] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Took 1.03 seconds to deallocate network for instance. [ 1626.864304] env[62508]: DEBUG oslo_vmware.api [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776191, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.006096] env[62508]: DEBUG oslo_vmware.api [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': task-1776201, 'name': PowerOnVM_Task, 'duration_secs': 0.544945} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.009010] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1627.009887] env[62508]: INFO nova.compute.manager [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Took 10.80 seconds to spawn the instance on the hypervisor. [ 1627.009887] env[62508]: DEBUG nova.compute.manager [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1627.010547] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec04e870-8c4f-4d9d-85aa-3b3c5be7a182 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.078151] env[62508]: DEBUG nova.network.neutron [req-3db1a0c6-8a40-4426-b013-06c6f8a7934e req-6cbfe581-de82-4416-ae4c-e6f91bbc3897 service nova] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Updated VIF entry in instance network info cache for port 72c0e2c3-36d1-4cd8-91e2-487be99a6348. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1627.078558] env[62508]: DEBUG nova.network.neutron [req-3db1a0c6-8a40-4426-b013-06c6f8a7934e req-6cbfe581-de82-4416-ae4c-e6f91bbc3897 service nova] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Updating instance_info_cache with network_info: [{"id": "72c0e2c3-36d1-4cd8-91e2-487be99a6348", "address": "fa:16:3e:aa:5e:a2", "network": {"id": "ca54620c-2118-4248-ac67-90f8579e33aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-578420006-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15a9d6b8eb4e44a7a3d7fa4abe0cd5bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72c0e2c3-36", "ovs_interfaceid": "72c0e2c3-36d1-4cd8-91e2-487be99a6348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1627.263737] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4dcdaa0-1b23-4471-a0dd-8840c65db8a3 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.279101] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b003f6b-308c-4ecd-aa59-b6beff6bcf2a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.282524] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776202, 'name': CreateVM_Task, 'duration_secs': 0.583485} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.282681] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1627.283795] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1627.283982] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1627.284328] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1627.284584] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37abc669-65f3-4c5b-b734-0b7439897a77 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.313697] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c869d545-2e3c-4e1c-b4eb-50f8250b3877 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.318079] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1627.318079] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b4a536-b785-25e7-daf4-1082f7de8ba0" [ 1627.318079] env[62508]: _type = "Task" [ 1627.318079] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.324949] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db7aabee-ad9b-426d-a267-e905f04378d9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.332536] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b4a536-b785-25e7-daf4-1082f7de8ba0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.343811] env[62508]: DEBUG nova.compute.provider_tree [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1627.362818] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1627.363338] env[62508]: DEBUG oslo_vmware.api [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776191, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.471871] env[62508]: DEBUG nova.compute.manager [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Stashing vm_state: active {{(pid=62508) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1627.530082] env[62508]: INFO nova.compute.manager [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Took 30.18 seconds to build instance. [ 1627.581530] env[62508]: DEBUG oslo_concurrency.lockutils [req-3db1a0c6-8a40-4426-b013-06c6f8a7934e req-6cbfe581-de82-4416-ae4c-e6f91bbc3897 service nova] Releasing lock "refresh_cache-091a11ef-d6c7-4f04-90a6-273da14ce88b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.829150] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b4a536-b785-25e7-daf4-1082f7de8ba0, 'name': SearchDatastore_Task, 'duration_secs': 0.020186} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.829468] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.829753] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1627.830318] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1627.830318] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1627.830498] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1627.830596] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb84ec5d-208e-41ba-8ec4-78e5faa766c9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.844203] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1627.844415] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1627.845222] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9c7f248-a114-405e-abc1-7c09cb0660ba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.848820] env[62508]: DEBUG nova.scheduler.client.report [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1627.855871] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1627.855871] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a5b093-1f20-ebeb-6236-8b17e6426827" [ 1627.855871] env[62508]: _type = "Task" [ 1627.855871] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.869557] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a5b093-1f20-ebeb-6236-8b17e6426827, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.873170] env[62508]: DEBUG oslo_vmware.api [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776191, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.991347] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.032312] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f81b30d6-87f3-4996-856e-1c4add1760e1 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Lock "a617fe8b-c70e-4988-a6ce-437ccc5261c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.700s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.353715] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.459s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.354291] env[62508]: DEBUG nova.compute.manager [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1628.356965] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.994s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.357238] env[62508]: DEBUG nova.objects.instance [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lazy-loading 'resources' on Instance uuid 4cc6d0f4-413a-44e1-850f-da499f582d15 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1628.370939] env[62508]: DEBUG oslo_vmware.api [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776191, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.374617] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a5b093-1f20-ebeb-6236-8b17e6426827, 'name': SearchDatastore_Task, 'duration_secs': 0.01284} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.375849] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d232e31-87ad-4bdb-ae1f-906d2c9b6254 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.382409] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1628.382409] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528296ff-f41c-b98f-62ac-e5c39797de3a" [ 1628.382409] env[62508]: _type = "Task" [ 1628.382409] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.391447] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528296ff-f41c-b98f-62ac-e5c39797de3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.553954] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.554287] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.554504] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.554681] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.554851] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.557566] env[62508]: INFO nova.compute.manager [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Terminating instance [ 1628.561122] env[62508]: DEBUG nova.compute.manager [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1628.561324] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1628.562177] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fddadd9-ab4d-4dc9-a731-7a73588aa059 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.569508] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1628.569735] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30db9276-954c-4722-960d-aa5a9ee452a2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.576251] env[62508]: DEBUG oslo_vmware.api [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1628.576251] env[62508]: value = "task-1776203" [ 1628.576251] env[62508]: _type = "Task" [ 1628.576251] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.584036] env[62508]: DEBUG oslo_vmware.api [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776203, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.619704] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Acquiring lock "a617fe8b-c70e-4988-a6ce-437ccc5261c6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.619979] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Lock "a617fe8b-c70e-4988-a6ce-437ccc5261c6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.620210] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Acquiring lock "a617fe8b-c70e-4988-a6ce-437ccc5261c6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.620397] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Lock "a617fe8b-c70e-4988-a6ce-437ccc5261c6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.620567] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Lock "a617fe8b-c70e-4988-a6ce-437ccc5261c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.622716] env[62508]: INFO nova.compute.manager [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Terminating instance [ 1628.627796] env[62508]: DEBUG nova.compute.manager [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1628.627994] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1628.628834] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad9ba6a8-fab0-49c7-9724-c72b43322ed5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.636726] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1628.636946] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-50947d5f-1a7a-46dc-a915-41f8fadc89a0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.643999] env[62508]: DEBUG oslo_vmware.api [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Waiting for the task: (returnval){ [ 1628.643999] env[62508]: value = "task-1776204" [ 1628.643999] env[62508]: _type = "Task" [ 1628.643999] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.651488] env[62508]: DEBUG oslo_vmware.api [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': task-1776204, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.860987] env[62508]: DEBUG nova.compute.utils [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1628.866626] env[62508]: DEBUG nova.compute.manager [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1628.867050] env[62508]: DEBUG nova.network.neutron [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1628.880637] env[62508]: DEBUG oslo_vmware.api [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776191, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.893230] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528296ff-f41c-b98f-62ac-e5c39797de3a, 'name': SearchDatastore_Task, 'duration_secs': 0.016126} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.893493] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1628.893752] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 091a11ef-d6c7-4f04-90a6-273da14ce88b/091a11ef-d6c7-4f04-90a6-273da14ce88b.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1628.894030] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-baad1c49-8d9f-4d17-ac63-159eac16592e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.904847] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1628.904847] env[62508]: value = "task-1776205" [ 1628.904847] env[62508]: _type = "Task" [ 1628.904847] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.914745] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776205, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.938763] env[62508]: DEBUG nova.policy [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf74c20248784c3ca734e528856f21f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce3b480c3c81499599aef114f92775cd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1629.088145] env[62508]: DEBUG oslo_vmware.api [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776203, 'name': PowerOffVM_Task, 'duration_secs': 0.329974} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.088470] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1629.088643] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1629.088900] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e42f95e6-1c3b-4c9d-ad65-d8c441d92582 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.157399] env[62508]: DEBUG oslo_vmware.api [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': task-1776204, 'name': PowerOffVM_Task, 'duration_secs': 0.21597} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.157477] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1629.158636] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1629.158636] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b0e8e683-6a0f-48b9-93d9-90fa7f4902a5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.166941] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1629.167208] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1629.167424] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Deleting the datastore file [datastore1] aedbd388-3ef7-410f-b0e3-5ea67ad56b65 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1629.167715] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1aba3a6f-2662-45f2-b144-275997f42d7e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.181186] env[62508]: DEBUG oslo_vmware.api [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for the task: (returnval){ [ 1629.181186] env[62508]: value = "task-1776208" [ 1629.181186] env[62508]: _type = "Task" [ 1629.181186] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.190550] env[62508]: DEBUG oslo_vmware.api [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776208, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.217140] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25414d4c-c5b4-454f-88b5-8e773879589e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.226582] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d59b6295-48a6-4e6f-af8b-89feb5b95817 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.273343] env[62508]: DEBUG nova.network.neutron [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Successfully created port: 47aa7e28-8aa9-462b-a2c6-2d530b3f5ec8 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1629.276173] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a7f3f56-f182-4ef4-b972-9511a3025b0b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.280677] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1629.280926] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1629.281130] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Deleting the datastore file [datastore1] a617fe8b-c70e-4988-a6ce-437ccc5261c6 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1629.281823] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e5c1cd1-7840-4d5a-8f6e-09667926d427 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.287457] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd8ece4-9c61-4390-bbed-5db0a744ca9b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.292698] env[62508]: DEBUG oslo_vmware.api [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Waiting for the task: (returnval){ [ 1629.292698] env[62508]: value = "task-1776209" [ 1629.292698] env[62508]: _type = "Task" [ 1629.292698] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.304561] env[62508]: DEBUG nova.compute.provider_tree [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1629.311208] env[62508]: DEBUG oslo_vmware.api [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': task-1776209, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.370606] env[62508]: DEBUG oslo_vmware.api [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776191, 'name': ReconfigVM_Task, 'duration_secs': 5.925074} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.370963] env[62508]: DEBUG oslo_concurrency.lockutils [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1629.371278] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Reconfigured VM to detach interface {{(pid=62508) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1629.374405] env[62508]: DEBUG nova.compute.manager [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1629.417552] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776205, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.691666] env[62508]: DEBUG oslo_vmware.api [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776208, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.737602] env[62508]: DEBUG nova.compute.manager [req-f0fb48fa-631a-4373-a542-b22433204ded req-c9ee200d-72bd-44a5-99bc-bc6d0b90f299 service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Received event network-vif-deleted-3f15fe5b-6f5a-4889-8452-9da44feeaab9 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1629.737814] env[62508]: INFO nova.compute.manager [req-f0fb48fa-631a-4373-a542-b22433204ded req-c9ee200d-72bd-44a5-99bc-bc6d0b90f299 service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Neutron deleted interface 3f15fe5b-6f5a-4889-8452-9da44feeaab9; detaching it from the instance and deleting it from the info cache [ 1629.738185] env[62508]: DEBUG nova.network.neutron [req-f0fb48fa-631a-4373-a542-b22433204ded req-c9ee200d-72bd-44a5-99bc-bc6d0b90f299 service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Updating instance_info_cache with network_info: [{"id": "2623e6a9-42b1-4f98-9d68-a5230cdc3d79", "address": "fa:16:3e:9d:f1:1e", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2623e6a9-42", "ovs_interfaceid": "2623e6a9-42b1-4f98-9d68-a5230cdc3d79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.802967] env[62508]: DEBUG oslo_vmware.api [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': task-1776209, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.808447] env[62508]: DEBUG nova.scheduler.client.report [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1629.917039] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776205, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.706008} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.917039] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 091a11ef-d6c7-4f04-90a6-273da14ce88b/091a11ef-d6c7-4f04-90a6-273da14ce88b.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1629.917320] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1629.917429] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-06dc4125-a2d0-4cd7-b54c-8a1b840d1851 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.924619] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1629.924619] env[62508]: value = "task-1776210" [ 1629.924619] env[62508]: _type = "Task" [ 1629.924619] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.933424] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776210, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.187908] env[62508]: DEBUG oslo_vmware.api [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Task: {'id': task-1776208, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.53756} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.188306] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1630.188368] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1630.188537] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1630.188705] env[62508]: INFO nova.compute.manager [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1630.188945] env[62508]: DEBUG oslo.service.loopingcall [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1630.189145] env[62508]: DEBUG nova.compute.manager [-] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1630.189239] env[62508]: DEBUG nova.network.neutron [-] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1630.240628] env[62508]: DEBUG oslo_concurrency.lockutils [req-f0fb48fa-631a-4373-a542-b22433204ded req-c9ee200d-72bd-44a5-99bc-bc6d0b90f299 service nova] Acquiring lock "70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1630.240876] env[62508]: DEBUG oslo_concurrency.lockutils [req-f0fb48fa-631a-4373-a542-b22433204ded req-c9ee200d-72bd-44a5-99bc-bc6d0b90f299 service nova] Acquired lock "70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1630.241863] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eccfcd38-1202-4f78-a760-3e5007a85dd1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.260966] env[62508]: DEBUG oslo_concurrency.lockutils [req-f0fb48fa-631a-4373-a542-b22433204ded req-c9ee200d-72bd-44a5-99bc-bc6d0b90f299 service nova] Releasing lock "70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1630.261354] env[62508]: WARNING nova.compute.manager [req-f0fb48fa-631a-4373-a542-b22433204ded req-c9ee200d-72bd-44a5-99bc-bc6d0b90f299 service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Detach interface failed, port_id=3f15fe5b-6f5a-4889-8452-9da44feeaab9, reason: No device with interface-id 3f15fe5b-6f5a-4889-8452-9da44feeaab9 exists on VM: nova.exception.NotFound: No device with interface-id 3f15fe5b-6f5a-4889-8452-9da44feeaab9 exists on VM [ 1630.304171] env[62508]: DEBUG oslo_vmware.api [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Task: {'id': task-1776209, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.608646} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.304407] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1630.304591] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1630.304766] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1630.304944] env[62508]: INFO nova.compute.manager [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1630.305228] env[62508]: DEBUG oslo.service.loopingcall [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1630.305419] env[62508]: DEBUG nova.compute.manager [-] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1630.305530] env[62508]: DEBUG nova.network.neutron [-] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1630.313327] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.956s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.315530] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.324s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1630.338324] env[62508]: INFO nova.scheduler.client.report [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Deleted allocations for instance 4cc6d0f4-413a-44e1-850f-da499f582d15 [ 1630.385287] env[62508]: DEBUG nova.compute.manager [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Start spawning the instance on the hypervisor. 
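Editor's note: the "Waiting for function ..._deallocate_network_with_retries to return" messages are produced by oslo.service's looping-call helper, which keeps invoking a function on a fixed interval until it signals completion. A simplified sketch of that retry shape is below; the _deallocate body is a placeholder, not Nova's implementation.

    # Simplified retry loop in the style of oslo.service's FixedIntervalLoopingCall,
    # as referenced by the loopingcall.py lines above. Only the looping-call
    # mechanics are the point; the "work" is faked.
    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _deallocate_with_retries():
        attempts['n'] += 1
        try:
            # Stand-in for the real network deallocation call.
            if attempts['n'] < 3:
                raise RuntimeError('transient Neutron error')
        except RuntimeError:
            return  # let the looping call fire again after the interval
        # Success: stop the loop and hand back a return value.
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    result = timer.start(interval=0.1).wait()
    print('deallocated:', result, 'after', attempts['n'], 'attempts')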
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1630.414717] env[62508]: DEBUG nova.virt.hardware [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1630.415096] env[62508]: DEBUG nova.virt.hardware [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1630.415282] env[62508]: DEBUG nova.virt.hardware [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1630.415508] env[62508]: DEBUG nova.virt.hardware [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1630.415697] env[62508]: DEBUG nova.virt.hardware [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1630.415944] env[62508]: DEBUG nova.virt.hardware [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1630.416216] env[62508]: DEBUG nova.virt.hardware [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1630.416408] env[62508]: DEBUG nova.virt.hardware [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1630.416586] env[62508]: DEBUG nova.virt.hardware [None 
req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1630.416765] env[62508]: DEBUG nova.virt.hardware [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1630.416973] env[62508]: DEBUG nova.virt.hardware [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1630.417931] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-847da66c-2a4d-473e-b048-8333d0447c3b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.431096] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9909ad36-eab6-44c3-8ede-c28aa2d71385 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.453044] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776210, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067226} completed successfully. 
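Editor's note: the nova.virt.hardware lines above compute candidate CPU topologies for a 1-vCPU flavor with no explicit limits; with maxima of 65536 sockets/cores/threads, the only factorization of one vCPU is 1 socket x 1 core x 1 thread, hence "Got 1 possible topologies". A small standalone illustration of that factorization follows (plain Python, not Nova's actual algorithm).

    # Enumerate (sockets, cores, threads) splits of a vCPU count, the idea behind
    # the "Build topologies ... Got 1 possible topologies" lines above.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topos = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topos.append((sockets, cores, threads))
        return topos

    print(possible_topologies(1))      # [(1, 1, 1)] -- matches the log above
    print(possible_topologies(4)[:3])  # several valid splits for 4 vCPUs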
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.453397] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1630.454692] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d43404-f40a-42f0-aa00-c20e814d87e7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.480207] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 091a11ef-d6c7-4f04-90a6-273da14ce88b/091a11ef-d6c7-4f04-90a6-273da14ce88b.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1630.480744] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1656d09b-1310-4816-b663-d7d76ac4e375 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.503490] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1630.503490] env[62508]: value = "task-1776211" [ 1630.503490] env[62508]: _type = "Task" [ 1630.503490] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.512452] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776211, 'name': ReconfigVM_Task} progress is 6%. 
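Editor's note: the oslo_vmware.api lines show a vCenter task handle ("task-1776211") being polled and its progress logged ("ReconfigVM_Task progress is 6%") until it completes. The generic poll-until-done loop below captures the same shape; fetch_progress is a stand-in for the real task-state query, not the oslo.vmware API.

    # Generic poll-until-done loop, mirroring the wait_for_task/_poll_task lines
    # above. fetch_progress() is a placeholder for querying the task state.
    import time

    def wait_for_task(fetch_progress, poll_interval=0.5, timeout=300):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = fetch_progress()
            print(f'task progress is {progress}%')
            if state == 'success':
                return True
            if state == 'error':
                raise RuntimeError('task failed')
            time.sleep(poll_interval)
        raise TimeoutError('task did not complete in time')

    # Tiny fake task that finishes on the third poll, just to exercise the loop.
    _calls = iter([('running', 6), ('running', 14), ('success', 100)])
    wait_for_task(lambda: next(_calls), poll_interval=0.01)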
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.656874] env[62508]: DEBUG oslo_concurrency.lockutils [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "refresh_cache-70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1630.657200] env[62508]: DEBUG oslo_concurrency.lockutils [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "refresh_cache-70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1630.657412] env[62508]: DEBUG nova.network.neutron [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1630.711102] env[62508]: DEBUG oslo_concurrency.lockutils [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "70c8de27-4696-4005-bbec-e7a33e56311b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.711388] env[62508]: DEBUG oslo_concurrency.lockutils [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "70c8de27-4696-4005-bbec-e7a33e56311b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1630.711606] env[62508]: DEBUG oslo_concurrency.lockutils [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "70c8de27-4696-4005-bbec-e7a33e56311b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.711790] env[62508]: DEBUG oslo_concurrency.lockutils [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "70c8de27-4696-4005-bbec-e7a33e56311b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1630.711959] env[62508]: DEBUG oslo_concurrency.lockutils [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "70c8de27-4696-4005-bbec-e7a33e56311b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.715207] env[62508]: INFO nova.compute.manager [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Terminating instance [ 1630.717141] env[62508]: DEBUG nova.compute.manager [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1630.717341] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1630.718531] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36580f7f-bb87-4bb8-9bc4-56e67dc7ac52 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.727176] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1630.727330] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-97b5b421-caf1-49a0-8259-908e218e909f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.735056] env[62508]: DEBUG oslo_vmware.api [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1630.735056] env[62508]: value = "task-1776212" [ 1630.735056] env[62508]: _type = "Task" [ 1630.735056] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.746262] env[62508]: DEBUG oslo_vmware.api [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776212, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.820882] env[62508]: INFO nova.compute.claims [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1630.850866] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28afa123-8dcb-4600-bbc9-506cf52d2d42 tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "4cc6d0f4-413a-44e1-850f-da499f582d15" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.710s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.937253] env[62508]: DEBUG nova.compute.manager [req-076ab04f-eb9b-487a-981b-c9c1ff862450 req-d742b51f-f664-4c27-8b60-bd1540454626 service nova] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Received event network-vif-deleted-daf9c849-a4e9-4de3-ba16-819ff682a207 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1630.937367] env[62508]: INFO nova.compute.manager [req-076ab04f-eb9b-487a-981b-c9c1ff862450 req-d742b51f-f664-4c27-8b60-bd1540454626 service nova] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Neutron deleted interface daf9c849-a4e9-4de3-ba16-819ff682a207; detaching it from the instance and deleting it from the info cache [ 1630.937536] env[62508]: DEBUG nova.network.neutron [req-076ab04f-eb9b-487a-981b-c9c1ff862450 req-d742b51f-f664-4c27-8b60-bd1540454626 service nova] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1631.014713] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776211, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.018755] env[62508]: DEBUG nova.network.neutron [-] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1631.130038] env[62508]: DEBUG nova.network.neutron [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Successfully updated port: 47aa7e28-8aa9-462b-a2c6-2d530b3f5ec8 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1631.243610] env[62508]: DEBUG oslo_vmware.api [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776212, 'name': PowerOffVM_Task, 'duration_secs': 0.20903} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.244026] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1631.244070] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1631.244324] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-46ec4076-6548-4425-934f-10e1bfa525a9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.322379] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1631.322642] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1631.322948] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Deleting the datastore file [datastore1] 70c8de27-4696-4005-bbec-e7a33e56311b {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1631.323154] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1964b54d-d430-4c0b-bc1a-4f42f8d5400e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.327912] env[62508]: INFO nova.compute.resource_tracker [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Updating resource usage from migration 8172f41b-4bbf-440a-a697-df783272e5c0 [ 1631.332153] env[62508]: DEBUG oslo_vmware.api [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1631.332153] env[62508]: value = "task-1776214" [ 1631.332153] env[62508]: _type = "Task" [ 1631.332153] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.343821] env[62508]: DEBUG oslo_vmware.api [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776214, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.408516] env[62508]: DEBUG nova.network.neutron [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Updating instance_info_cache with network_info: [{"id": "2623e6a9-42b1-4f98-9d68-a5230cdc3d79", "address": "fa:16:3e:9d:f1:1e", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2623e6a9-42", "ovs_interfaceid": "2623e6a9-42b1-4f98-9d68-a5230cdc3d79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1631.410051] env[62508]: DEBUG nova.network.neutron [-] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1631.441233] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f9b852b8-8453-4505-8c8f-712f5e1444b8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.454247] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26bc14b0-54db-4143-9438-e54e50534357 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.493648] env[62508]: DEBUG nova.compute.manager [req-076ab04f-eb9b-487a-981b-c9c1ff862450 req-d742b51f-f664-4c27-8b60-bd1540454626 service nova] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Detach interface failed, port_id=daf9c849-a4e9-4de3-ba16-819ff682a207, reason: Instance a617fe8b-c70e-4988-a6ce-437ccc5261c6 could not be found. 
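Editor's note: the instance_info_cache update above stores a full network_info structure (port id, fixed IP, floating IP, MTU, NSX segmentation id) as JSON. The helper below pulls the addresses back out of a structure with that shape; the payload is trimmed to the fields used and is purely illustrative.

    # Extract fixed and floating IPs from a network_info entry shaped like the
    # instance_info_cache payload logged above.
    import json

    network_info_json = '''
    [{"id": "2623e6a9-42b1-4f98-9d68-a5230cdc3d79",
      "network": {"subnets": [{"cidr": "192.168.128.0/28",
          "ips": [{"address": "192.168.128.3", "type": "fixed",
                   "floating_ips": [{"address": "10.180.180.232", "type": "floating"}]}]}]}}]
    '''

    def list_addresses(network_info):
        for vif in network_info:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    yield vif['id'], ip['address'], 'fixed'
                    for fip in ip.get('floating_ips', []):
                        yield vif['id'], fip['address'], 'floating'

    for port_id, address, kind in list_addresses(json.loads(network_info_json)):
        print(port_id, address, kind)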
{{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1631.518015] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776211, 'name': ReconfigVM_Task, 'duration_secs': 0.762664} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.518325] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 091a11ef-d6c7-4f04-90a6-273da14ce88b/091a11ef-d6c7-4f04-90a6-273da14ce88b.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1631.518958] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93cfe627-e513-47a5-85cd-556ef2c92f48 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.522847] env[62508]: INFO nova.compute.manager [-] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Took 1.33 seconds to deallocate network for instance. [ 1631.528480] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1631.528480] env[62508]: value = "task-1776215" [ 1631.528480] env[62508]: _type = "Task" [ 1631.528480] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.537628] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776215, 'name': Rename_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.632278] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "refresh_cache-9283494f-d8e2-4077-9e4d-57ee4786c3c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1631.632468] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired lock "refresh_cache-9283494f-d8e2-4077-9e4d-57ee4786c3c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1631.632621] env[62508]: DEBUG nova.network.neutron [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1631.658846] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1894426a-d359-4da7-a0e4-bb969bac4949 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.667421] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1dfc918-0d37-4adc-88a9-0014a4979da7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.702681] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-124074ff-a47e-4eb3-86d0-804fc3166e44 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.710191] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397213ae-1bff-432f-86f3-deb0331e09be {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.725514] env[62508]: DEBUG nova.compute.provider_tree [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1631.762868] env[62508]: DEBUG nova.compute.manager [req-85b7c253-3a6f-405c-b7b9-bcb684124fb7 req-533f7cc1-ad71-4ffb-ac62-8002a9c2fb02 service nova] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Received event network-vif-deleted-bd3f776a-1eed-4e8d-b7f3-d958db372a2f {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1631.842544] env[62508]: DEBUG oslo_vmware.api [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 
tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776214, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174917} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.842841] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1631.843020] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1631.843201] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1631.843414] env[62508]: INFO nova.compute.manager [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1631.843733] env[62508]: DEBUG oslo.service.loopingcall [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
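Editor's note: the vmops lines above trace the VMware destroy path for instance 70c8de27: power off, unregister the VM, delete its datastore directory, then report the total time ("Took 1.13 seconds to destroy the instance on the hypervisor"). The schematic below shows only that sequence and timing; power_off, unregister and delete_datastore_dir are hypothetical stand-ins for the vCenter calls.

    # Schematic of the destroy sequence logged above; the three callables are
    # hypothetical stand-ins, not real driver methods.
    import time

    def destroy_instance(instance_uuid, power_off, unregister, delete_datastore_dir):
        start = time.monotonic()
        power_off(instance_uuid)             # PowerOffVM_Task
        unregister(instance_uuid)            # UnregisterVM
        delete_datastore_dir(instance_uuid)  # DeleteDatastoreFile_Task on [datastore1] <uuid>
        elapsed = time.monotonic() - start
        print(f'Took {elapsed:.2f} seconds to destroy the instance on the hypervisor.')

    destroy_instance('70c8de27-4696-4005-bbec-e7a33e56311b',
                     power_off=lambda u: None,
                     unregister=lambda u: None,
                     delete_datastore_dir=lambda u: None)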
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1631.844015] env[62508]: DEBUG nova.compute.manager [-] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1631.844135] env[62508]: DEBUG nova.network.neutron [-] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1631.845887] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquiring lock "a72fd09e-a3be-486a-a03b-8c25b04d82d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1631.846129] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "a72fd09e-a3be-486a-a03b-8c25b04d82d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1631.846328] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquiring lock "a72fd09e-a3be-486a-a03b-8c25b04d82d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1631.846511] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "a72fd09e-a3be-486a-a03b-8c25b04d82d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1631.846665] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "a72fd09e-a3be-486a-a03b-8c25b04d82d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.848618] env[62508]: INFO nova.compute.manager [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Terminating instance [ 1631.850134] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquiring lock "refresh_cache-a72fd09e-a3be-486a-a03b-8c25b04d82d0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1631.850295] env[62508]: DEBUG oslo_concurrency.lockutils [None 
req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquired lock "refresh_cache-a72fd09e-a3be-486a-a03b-8c25b04d82d0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1631.850458] env[62508]: DEBUG nova.network.neutron [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1631.911618] env[62508]: DEBUG oslo_concurrency.lockutils [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "refresh_cache-70c8de27-4696-4005-bbec-e7a33e56311b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1631.915081] env[62508]: INFO nova.compute.manager [-] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Took 1.61 seconds to deallocate network for instance. [ 1632.032206] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1632.040205] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776215, 'name': Rename_Task, 'duration_secs': 0.408702} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.040205] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1632.040619] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1be6cd8-373e-4013-92cd-e628caf96b6e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.048222] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1632.048222] env[62508]: value = "task-1776216" [ 1632.048222] env[62508]: _type = "Task" [ 1632.048222] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.056874] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776216, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.178556] env[62508]: DEBUG nova.network.neutron [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1632.252543] env[62508]: ERROR nova.scheduler.client.report [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [req-8bf3dce4-c1dd-4e4a-8a58-4f11e3a5df84] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8bf3dce4-c1dd-4e4a-8a58-4f11e3a5df84"}]} [ 1632.275820] env[62508]: DEBUG nova.scheduler.client.report [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1632.291821] env[62508]: DEBUG nova.scheduler.client.report [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1632.292079] env[62508]: DEBUG nova.compute.provider_tree [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1632.311466] env[62508]: DEBUG nova.scheduler.client.report [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 
tempest-ServerDiskConfigTestJSON-1662059138-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1632.332625] env[62508]: DEBUG nova.scheduler.client.report [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1632.372930] env[62508]: DEBUG nova.network.neutron [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1632.416619] env[62508]: DEBUG oslo_concurrency.lockutils [None req-187dcfb1-97ba-4b55-9874-3b9979f2f8fe tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "interface-70c8de27-4696-4005-bbec-e7a33e56311b-3f15fe5b-6f5a-4889-8452-9da44feeaab9" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.652s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1632.424285] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1632.430184] env[62508]: DEBUG nova.network.neutron [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1632.450556] env[62508]: DEBUG nova.network.neutron [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Updating instance_info_cache with network_info: [{"id": "47aa7e28-8aa9-462b-a2c6-2d530b3f5ec8", "address": "fa:16:3e:94:f5:3c", "network": {"id": "29c40f34-e678-48f1-94f4-d128bc6dfe71", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1416755499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce3b480c3c81499599aef114f92775cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47aa7e28-8a", "ovs_interfaceid": "47aa7e28-8aa9-462b-a2c6-2d530b3f5ec8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1632.560331] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776216, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.659262] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b55ba19c-d911-4a1d-b2a8-f53851d3bbfa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.667299] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58bb9b7e-363c-4963-90ca-b97f2c49b1d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.700512] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-038671ee-7b60-46ba-a184-e6792fc4d6fa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.709139] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e284bd7b-1078-4c05-8c81-900c829e95b7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.726022] env[62508]: DEBUG nova.compute.provider_tree [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1632.933582] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Releasing lock "refresh_cache-a72fd09e-a3be-486a-a03b-8c25b04d82d0" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1632.933582] env[62508]: DEBUG nova.compute.manager [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1632.933582] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1632.934485] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de991b7-e1b4-4a13-be70-23d0afe81b8e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.944587] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1632.944976] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf828dca-5d4b-40f0-962c-eac45d8b148b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.946950] env[62508]: DEBUG nova.network.neutron [-] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1632.955494] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Releasing lock "refresh_cache-9283494f-d8e2-4077-9e4d-57ee4786c3c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1632.956582] env[62508]: DEBUG nova.compute.manager [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Instance network_info: |[{"id": "47aa7e28-8aa9-462b-a2c6-2d530b3f5ec8", "address": "fa:16:3e:94:f5:3c", "network": {"id": "29c40f34-e678-48f1-94f4-d128bc6dfe71", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1416755499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce3b480c3c81499599aef114f92775cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47aa7e28-8a", "ovs_interfaceid": "47aa7e28-8aa9-462b-a2c6-2d530b3f5ec8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1632.956582] env[62508]: DEBUG oslo_vmware.api [None 
req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1632.956582] env[62508]: value = "task-1776217" [ 1632.956582] env[62508]: _type = "Task" [ 1632.956582] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.956901] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:f5:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47aa7e28-8aa9-462b-a2c6-2d530b3f5ec8', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1632.969063] env[62508]: DEBUG oslo.service.loopingcall [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1632.969063] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1632.973117] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b10052c7-58c1-43fb-89c1-37d1aba7f43d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.994270] env[62508]: DEBUG nova.compute.manager [req-5b4da256-9f3a-42e1-810b-324e0c48baa3 req-cdd68635-e164-4cbe-ae4f-119bf9a4a927 service nova] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Received event network-vif-plugged-47aa7e28-8aa9-462b-a2c6-2d530b3f5ec8 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1632.994499] env[62508]: DEBUG oslo_concurrency.lockutils [req-5b4da256-9f3a-42e1-810b-324e0c48baa3 req-cdd68635-e164-4cbe-ae4f-119bf9a4a927 service nova] Acquiring lock "9283494f-d8e2-4077-9e4d-57ee4786c3c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1632.994709] env[62508]: DEBUG oslo_concurrency.lockutils [req-5b4da256-9f3a-42e1-810b-324e0c48baa3 req-cdd68635-e164-4cbe-ae4f-119bf9a4a927 service nova] Lock "9283494f-d8e2-4077-9e4d-57ee4786c3c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1632.994944] env[62508]: DEBUG oslo_concurrency.lockutils [req-5b4da256-9f3a-42e1-810b-324e0c48baa3 req-cdd68635-e164-4cbe-ae4f-119bf9a4a927 service nova] Lock "9283494f-d8e2-4077-9e4d-57ee4786c3c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1632.995149] env[62508]: DEBUG nova.compute.manager 
[req-5b4da256-9f3a-42e1-810b-324e0c48baa3 req-cdd68635-e164-4cbe-ae4f-119bf9a4a927 service nova] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] No waiting events found dispatching network-vif-plugged-47aa7e28-8aa9-462b-a2c6-2d530b3f5ec8 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1632.995327] env[62508]: WARNING nova.compute.manager [req-5b4da256-9f3a-42e1-810b-324e0c48baa3 req-cdd68635-e164-4cbe-ae4f-119bf9a4a927 service nova] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Received unexpected event network-vif-plugged-47aa7e28-8aa9-462b-a2c6-2d530b3f5ec8 for instance with vm_state building and task_state spawning. [ 1632.995546] env[62508]: DEBUG nova.compute.manager [req-5b4da256-9f3a-42e1-810b-324e0c48baa3 req-cdd68635-e164-4cbe-ae4f-119bf9a4a927 service nova] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Received event network-changed-47aa7e28-8aa9-462b-a2c6-2d530b3f5ec8 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1632.995756] env[62508]: DEBUG nova.compute.manager [req-5b4da256-9f3a-42e1-810b-324e0c48baa3 req-cdd68635-e164-4cbe-ae4f-119bf9a4a927 service nova] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Refreshing instance network info cache due to event network-changed-47aa7e28-8aa9-462b-a2c6-2d530b3f5ec8. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1632.995990] env[62508]: DEBUG oslo_concurrency.lockutils [req-5b4da256-9f3a-42e1-810b-324e0c48baa3 req-cdd68635-e164-4cbe-ae4f-119bf9a4a927 service nova] Acquiring lock "refresh_cache-9283494f-d8e2-4077-9e4d-57ee4786c3c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1632.996187] env[62508]: DEBUG oslo_concurrency.lockutils [req-5b4da256-9f3a-42e1-810b-324e0c48baa3 req-cdd68635-e164-4cbe-ae4f-119bf9a4a927 service nova] Acquired lock "refresh_cache-9283494f-d8e2-4077-9e4d-57ee4786c3c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1632.996355] env[62508]: DEBUG nova.network.neutron [req-5b4da256-9f3a-42e1-810b-324e0c48baa3 req-cdd68635-e164-4cbe-ae4f-119bf9a4a927 service nova] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Refreshing network info cache for port 47aa7e28-8aa9-462b-a2c6-2d530b3f5ec8 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1633.003973] env[62508]: DEBUG oslo_vmware.api [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776217, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.006034] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1633.006034] env[62508]: value = "task-1776218" [ 1633.006034] env[62508]: _type = "Task" [ 1633.006034] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.017787] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776218, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.061400] env[62508]: DEBUG oslo_vmware.api [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776216, 'name': PowerOnVM_Task, 'duration_secs': 0.955578} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.062265] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1633.062362] env[62508]: INFO nova.compute.manager [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Took 9.13 seconds to spawn the instance on the hypervisor. [ 1633.062537] env[62508]: DEBUG nova.compute.manager [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1633.063444] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c6976f-9268-499a-98c4-764f8a2cb6ca {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.260661] env[62508]: DEBUG nova.scheduler.client.report [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 112 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1633.261034] env[62508]: DEBUG nova.compute.provider_tree [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 112 to 113 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1633.261448] env[62508]: DEBUG nova.compute.provider_tree [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1633.456526] env[62508]: INFO nova.compute.manager [-] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Took 1.61 seconds to deallocate network for instance. [ 1633.477832] env[62508]: DEBUG oslo_vmware.api [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776217, 'name': PowerOffVM_Task, 'duration_secs': 0.186959} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.478128] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1633.478302] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1633.478560] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-530549aa-8b9b-4908-8be5-863450da90ff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.507867] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1633.508176] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1633.508398] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Deleting the datastore file [datastore1] a72fd09e-a3be-486a-a03b-8c25b04d82d0 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1633.508787] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ff7f006-8a1f-4a01-874d-6b11f2cf05a5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.520122] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776218, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.521953] env[62508]: DEBUG oslo_vmware.api [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for the task: (returnval){ [ 1633.521953] env[62508]: value = "task-1776220" [ 1633.521953] env[62508]: _type = "Task" [ 1633.521953] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.530652] env[62508]: DEBUG oslo_vmware.api [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776220, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.582017] env[62508]: INFO nova.compute.manager [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Took 14.18 seconds to build instance. [ 1633.766425] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.451s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.766731] env[62508]: INFO nova.compute.manager [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Migrating [ 1633.775493] env[62508]: DEBUG nova.network.neutron [req-5b4da256-9f3a-42e1-810b-324e0c48baa3 req-cdd68635-e164-4cbe-ae4f-119bf9a4a927 service nova] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Updated VIF entry in instance network info cache for port 47aa7e28-8aa9-462b-a2c6-2d530b3f5ec8. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1633.775941] env[62508]: DEBUG nova.network.neutron [req-5b4da256-9f3a-42e1-810b-324e0c48baa3 req-cdd68635-e164-4cbe-ae4f-119bf9a4a927 service nova] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Updating instance_info_cache with network_info: [{"id": "47aa7e28-8aa9-462b-a2c6-2d530b3f5ec8", "address": "fa:16:3e:94:f5:3c", "network": {"id": "29c40f34-e678-48f1-94f4-d128bc6dfe71", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1416755499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce3b480c3c81499599aef114f92775cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47aa7e28-8a", "ovs_interfaceid": "47aa7e28-8aa9-462b-a2c6-2d530b3f5ec8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1633.777316] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.747s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.777566] env[62508]: DEBUG nova.objects.instance [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lazy-loading 'resources' on Instance uuid aedbd388-3ef7-410f-b0e3-5ea67ad56b65 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1633.962429] env[62508]: DEBUG oslo_concurrency.lockutils [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1634.020743] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776218, 'name': CreateVM_Task, 'duration_secs': 0.627109} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.020915] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1634.021608] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1634.021794] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1634.022143] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1634.022440] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c9cec9f-3d36-4633-a3a7-d6c722c89070 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.032421] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1634.032421] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52adde06-6315-7c20-5424-1c9474653ec4" [ 1634.032421] env[62508]: _type = "Task" [ 1634.032421] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.035649] env[62508]: DEBUG oslo_vmware.api [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Task: {'id': task-1776220, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.259876} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.038698] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1634.038932] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1634.039179] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1634.039782] env[62508]: INFO nova.compute.manager [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1634.039782] env[62508]: DEBUG oslo.service.loopingcall [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1634.042008] env[62508]: DEBUG nova.compute.manager [-] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1634.042121] env[62508]: DEBUG nova.network.neutron [-] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1634.050371] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52adde06-6315-7c20-5424-1c9474653ec4, 'name': SearchDatastore_Task, 'duration_secs': 0.009619} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.050737] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1634.052087] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1634.052329] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1634.052488] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1634.052675] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1634.055270] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-798dc9e8-b969-491d-9755-84e90f44b626 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.058567] env[62508]: DEBUG nova.network.neutron [-] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1634.076884] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1634.077199] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1634.078274] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db06c943-5464-4427-a61c-dad622314313 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.082274] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097596bc-3e34-4e36-8990-b7a3bdaf78a7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.085019] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72f868df-bea6-4e78-bf46-18c73750999c tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "091a11ef-d6c7-4f04-90a6-273da14ce88b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.690s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.087855] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1634.087855] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52751324-96db-005d-54e2-3ab655718c54" [ 1634.087855] env[62508]: _type = "Task" [ 1634.087855] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.092923] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83206375-67f6-4845-8667-7fb1ca30f3e1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.102463] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52751324-96db-005d-54e2-3ab655718c54, 'name': SearchDatastore_Task, 'duration_secs': 0.011572} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.129628] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-076135b6-ace1-4ffa-9dae-c823b0066006 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.132896] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbecac6-7a4e-47ef-8d86-09c0c68f825a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.138586] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1634.138586] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c360e7-7c49-912a-115c-58bc04ecd2a8" [ 1634.138586] env[62508]: _type = "Task" [ 1634.138586] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.144788] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce044db-02c8-4ba4-9649-5056afbc56fd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.155145] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c360e7-7c49-912a-115c-58bc04ecd2a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.161010] env[62508]: DEBUG nova.compute.provider_tree [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1634.281278] env[62508]: DEBUG oslo_concurrency.lockutils [req-5b4da256-9f3a-42e1-810b-324e0c48baa3 req-cdd68635-e164-4cbe-ae4f-119bf9a4a927 service nova] Releasing lock "refresh_cache-9283494f-d8e2-4077-9e4d-57ee4786c3c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1634.281662] env[62508]: DEBUG nova.compute.manager [req-5b4da256-9f3a-42e1-810b-324e0c48baa3 req-cdd68635-e164-4cbe-ae4f-119bf9a4a927 service nova] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Received event network-vif-deleted-2623e6a9-42b1-4f98-9d68-a5230cdc3d79 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1634.287362] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "refresh_cache-2e32ca83-8506-4588-bd33-4eadb7d2d30a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1634.287551] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "refresh_cache-2e32ca83-8506-4588-bd33-4eadb7d2d30a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1634.287653] env[62508]: DEBUG nova.network.neutron [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1634.562410] env[62508]: DEBUG nova.network.neutron [-] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1634.582504] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1634.582725] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1634.651581] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c360e7-7c49-912a-115c-58bc04ecd2a8, 'name': SearchDatastore_Task, 'duration_secs': 0.01395} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.651852] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1634.652139] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 9283494f-d8e2-4077-9e4d-57ee4786c3c7/9283494f-d8e2-4077-9e4d-57ee4786c3c7.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1634.652408] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e1458732-036e-4921-b23a-9fbd0f4d781b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.659277] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1634.659277] env[62508]: value = "task-1776221" [ 1634.659277] env[62508]: _type = "Task" [ 1634.659277] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.664388] env[62508]: DEBUG nova.scheduler.client.report [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1634.670693] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776221, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.909794] env[62508]: DEBUG nova.compute.manager [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1634.911059] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dccff1d4-64d2-4024-90cb-c8fe2cab6421 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.043689] env[62508]: DEBUG nova.network.neutron [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Updating instance_info_cache with network_info: [{"id": "8834c92d-7c01-4079-9e5c-7fbe2b25d73e", "address": "fa:16:3e:85:8b:d0", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8834c92d-7c", "ovs_interfaceid": "8834c92d-7c01-4079-9e5c-7fbe2b25d73e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1635.064655] env[62508]: INFO nova.compute.manager [-] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Took 1.02 seconds to deallocate network 
for instance. [ 1635.090457] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1635.090622] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1635.168786] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776221, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.171841] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.395s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1635.173952] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.753s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1635.174191] env[62508]: DEBUG nova.objects.instance [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Lazy-loading 'resources' on Instance uuid a617fe8b-c70e-4988-a6ce-437ccc5261c6 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1635.197164] env[62508]: INFO nova.scheduler.client.report [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Deleted allocations for instance aedbd388-3ef7-410f-b0e3-5ea67ad56b65 [ 1635.423972] env[62508]: INFO nova.compute.manager [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] instance snapshotting [ 1635.426213] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea7f1d5-a3ca-42cf-bcdf-0d505cf13ff3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.447386] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acedd68e-5c82-4a7c-a545-4ed0ff05ff0d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.547404] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "refresh_cache-2e32ca83-8506-4588-bd33-4eadb7d2d30a" {{(pid=62508) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1635.571200] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1635.669363] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776221, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.708379] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b7e8e88e-c6e0-4ee0-afdc-2c0b646c7efa tempest-ServersNegativeTestJSON-1515097760 tempest-ServersNegativeTestJSON-1515097760-project-member] Lock "aedbd388-3ef7-410f-b0e3-5ea67ad56b65" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.154s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1635.959932] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Creating Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1635.960285] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4c6cbc72-423d-4094-9146-4b2409ad7038 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.971798] env[62508]: DEBUG oslo_vmware.api [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1635.971798] env[62508]: value = "task-1776222" [ 1635.971798] env[62508]: _type = "Task" [ 1635.971798] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.976261] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe21948-f704-4e02-83cf-42ed44c43e14 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.983809] env[62508]: DEBUG oslo_vmware.api [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776222, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.987113] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389adb7a-10f3-4b79-9b50-ecc2338357fb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.019383] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99b7c91-0719-4f86-90d8-1d0a0d7dc21f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.027335] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5672fe10-7a4c-4311-b5a9-35bc6bc92aeb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.040993] env[62508]: DEBUG nova.compute.provider_tree [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1636.171128] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776221, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.195539] env[62508]: DEBUG oslo_vmware.rw_handles [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5296ec8c-7a5b-0f30-ea07-054f7a337aa0/disk-0.vmdk. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1636.196506] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-535feced-b078-480a-88a0-c6a887760fbc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.203207] env[62508]: DEBUG oslo_vmware.rw_handles [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5296ec8c-7a5b-0f30-ea07-054f7a337aa0/disk-0.vmdk is in state: ready. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1636.203388] env[62508]: ERROR oslo_vmware.rw_handles [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5296ec8c-7a5b-0f30-ea07-054f7a337aa0/disk-0.vmdk due to incomplete transfer. 
[ 1636.203631] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-7865d7bc-c3b2-413d-81ac-dcdc5607fb52 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.210247] env[62508]: DEBUG oslo_vmware.rw_handles [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5296ec8c-7a5b-0f30-ea07-054f7a337aa0/disk-0.vmdk. {{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1636.210481] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Uploaded image 57743140-2ac4-4e84-acc9-58f9b671609a to the Glance image server {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1636.213113] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Destroying the VM {{(pid=62508) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1636.213633] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0cd1b37b-5685-418f-bff8-81a35bff6e1b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.219949] env[62508]: DEBUG oslo_vmware.api [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1636.219949] env[62508]: value = "task-1776223" [ 1636.219949] env[62508]: _type = "Task" [ 1636.219949] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.229139] env[62508]: DEBUG oslo_vmware.api [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776223, 'name': Destroy_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.482363] env[62508]: DEBUG oslo_vmware.api [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776222, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.544302] env[62508]: DEBUG nova.scheduler.client.report [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1636.671972] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776221, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.730566] env[62508]: DEBUG oslo_vmware.api [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776223, 'name': Destroy_Task} progress is 33%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.983467] env[62508]: DEBUG oslo_vmware.api [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776222, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.049296] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.875s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.051757] env[62508]: DEBUG oslo_concurrency.lockutils [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.089s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1637.052268] env[62508]: DEBUG nova.objects.instance [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lazy-loading 'resources' on Instance uuid 70c8de27-4696-4005-bbec-e7a33e56311b {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1637.064499] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd15e06-db18-440b-881e-181fca0f86e9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.088517] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Updating instance '2e32ca83-8506-4588-bd33-4eadb7d2d30a' progress to 0 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1637.092861] env[62508]: INFO nova.scheduler.client.report [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Deleted allocations for instance a617fe8b-c70e-4988-a6ce-437ccc5261c6 [ 1637.178724] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776221, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.232479] env[62508]: DEBUG oslo_vmware.api [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776223, 'name': Destroy_Task, 'duration_secs': 0.789008} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.232822] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Destroyed the VM [ 1637.233170] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Deleting Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1637.233500] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ce782b34-6410-4bdc-91c2-4f22ec0bfae8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.241570] env[62508]: DEBUG oslo_vmware.api [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1637.241570] env[62508]: value = "task-1776224" [ 1637.241570] env[62508]: _type = "Task" [ 1637.241570] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.254498] env[62508]: DEBUG oslo_vmware.api [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776224, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.486786] env[62508]: DEBUG oslo_vmware.api [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776222, 'name': CreateSnapshot_Task, 'duration_secs': 1.089177} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.486786] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Created Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1637.487580] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ec8c8b-6253-4560-a1b0-a90cf93fc46a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.601157] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1637.601157] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61cb79ef-1de9-4546-9d3d-aab298d594d1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.608906] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8b09fc86-8d98-48c7-9b21-3f2f652e2009 tempest-ServerAddressesTestJSON-824768532 tempest-ServerAddressesTestJSON-824768532-project-member] Lock "a617fe8b-c70e-4988-a6ce-437ccc5261c6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.988s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.615783] env[62508]: DEBUG oslo_vmware.api [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1637.615783] env[62508]: value = "task-1776225" [ 1637.615783] env[62508]: _type = "Task" [ 1637.615783] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.636139] env[62508]: DEBUG oslo_vmware.api [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776225, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.672976] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776221, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.752107] env[62508]: DEBUG oslo_vmware.api [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776224, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.909098] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15beddd9-fc4a-40e8-895c-3f5b7611b02b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.917200] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd6236a-5023-418d-b511-b167cb06dec5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.951084] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b721e47-a2d6-4d49-8170-f99f55de30ef {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.961120] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33292018-1041-4c2a-a975-a2d49ff9dc0a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.972917] env[62508]: DEBUG nova.compute.provider_tree [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1638.006797] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Creating linked-clone VM from snapshot {{(pid=62508) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1638.007118] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-eff9d7e5-2311-4394-a94d-aa11b6847703 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.016859] env[62508]: DEBUG oslo_vmware.api [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1638.016859] env[62508]: value = "task-1776226" [ 1638.016859] env[62508]: _type = "Task" [ 1638.016859] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.024665] env[62508]: DEBUG oslo_vmware.api [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776226, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.129988] env[62508]: DEBUG oslo_vmware.api [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776225, 'name': PowerOffVM_Task, 'duration_secs': 0.313415} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.130284] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1638.130493] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Updating instance '2e32ca83-8506-4588-bd33-4eadb7d2d30a' progress to 17 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1638.174056] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776221, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.032095} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.174345] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 9283494f-d8e2-4077-9e4d-57ee4786c3c7/9283494f-d8e2-4077-9e4d-57ee4786c3c7.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1638.174558] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1638.174828] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2930da32-b5ca-4b73-a2de-72c806c89c9d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.181438] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1638.181438] env[62508]: value = "task-1776227" [ 1638.181438] env[62508]: _type = "Task" [ 1638.181438] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.189546] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776227, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.253402] env[62508]: DEBUG oslo_vmware.api [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776224, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.515881] env[62508]: DEBUG nova.scheduler.client.report [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 113 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1638.516245] env[62508]: DEBUG nova.compute.provider_tree [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 113 to 114 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1638.516402] env[62508]: DEBUG nova.compute.provider_tree [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1638.531141] env[62508]: DEBUG oslo_vmware.api [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776226, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.641115] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1638.641115] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1638.641115] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1638.641115] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1638.641115] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1638.641115] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1638.641733] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1638.642063] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1638.642357] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 
tempest-ServerDiskConfigTestJSON-1662059138-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1638.642725] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1638.643119] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1638.652040] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44e22c82-f4e7-406c-99d1-98d3b0372bb9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.671578] env[62508]: DEBUG oslo_vmware.api [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1638.671578] env[62508]: value = "task-1776228" [ 1638.671578] env[62508]: _type = "Task" [ 1638.671578] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.680332] env[62508]: DEBUG oslo_vmware.api [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776228, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.692137] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776227, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08645} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.692456] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1638.695018] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d50fc0-ad26-403c-81d6-82f70d5d6451 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.718835] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 9283494f-d8e2-4077-9e4d-57ee4786c3c7/9283494f-d8e2-4077-9e4d-57ee4786c3c7.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1638.719355] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7115287f-00bc-4491-ab76-fe012c3b4af9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.741768] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1638.741768] env[62508]: value = "task-1776229" [ 1638.741768] env[62508]: _type = "Task" [ 1638.741768] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.755576] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776229, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.759284] env[62508]: DEBUG oslo_vmware.api [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776224, 'name': RemoveSnapshot_Task, 'duration_secs': 1.104125} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.760129] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Deleted Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1638.760129] env[62508]: INFO nova.compute.manager [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Took 17.55 seconds to snapshot the instance on the hypervisor. 
[ 1639.026128] env[62508]: DEBUG oslo_concurrency.lockutils [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.974s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1639.028656] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.458s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1639.028901] env[62508]: DEBUG nova.objects.instance [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lazy-loading 'resources' on Instance uuid a72fd09e-a3be-486a-a03b-8c25b04d82d0 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1639.039424] env[62508]: DEBUG oslo_vmware.api [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776226, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.066802] env[62508]: INFO nova.scheduler.client.report [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Deleted allocations for instance 70c8de27-4696-4005-bbec-e7a33e56311b [ 1639.181980] env[62508]: DEBUG oslo_vmware.api [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776228, 'name': ReconfigVM_Task, 'duration_secs': 0.217844} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.182333] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Updating instance '2e32ca83-8506-4588-bd33-4eadb7d2d30a' progress to 33 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1639.262714] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776229, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.328438] env[62508]: DEBUG nova.compute.manager [None req-0b37c1b6-56d4-4b7b-8176-49fc55d69ce2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Found 2 images (rotation: 2) {{(pid=62508) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1639.534825] env[62508]: DEBUG oslo_vmware.api [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776226, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.577576] env[62508]: DEBUG oslo_concurrency.lockutils [None req-97326aed-8aa6-43f3-a6ea-20fbfa5294b2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "70c8de27-4696-4005-bbec-e7a33e56311b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.866s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1639.691561] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1639.691967] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1639.692164] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1639.692360] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1639.692511] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1639.692664] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 
tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1639.692870] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1639.693073] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1639.693271] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1639.693469] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1639.693655] env[62508]: DEBUG nova.virt.hardware [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1639.700251] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Reconfiguring VM instance instance-0000004b to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1639.702330] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50545a95-fea7-403a-9aed-ef5cbb1870b9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.728083] env[62508]: DEBUG oslo_vmware.api [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1639.728083] env[62508]: value = "task-1776230" [ 1639.728083] env[62508]: _type = "Task" [ 1639.728083] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.739900] env[62508]: DEBUG oslo_vmware.api [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776230, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.755753] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776229, 'name': ReconfigVM_Task, 'duration_secs': 0.540584} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.758614] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 9283494f-d8e2-4077-9e4d-57ee4786c3c7/9283494f-d8e2-4077-9e4d-57ee4786c3c7.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1639.759661] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df88acf0-5813-48c8-8c40-dc224fb3fe71 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.765550] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1639.765550] env[62508]: value = "task-1776231" [ 1639.765550] env[62508]: _type = "Task" [ 1639.765550] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.774038] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776231, 'name': Rename_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.912717] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b011af46-e9d0-400c-a0c1-bfaccc5cf3dc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.920533] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a0c1057-fc5e-44c2-b9bc-1747a627c3d1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.958695] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-484840ae-7633-4e8f-8dd7-e2366686525d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.967111] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd20776-71aa-458b-8313-bb39b9d48611 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.987616] env[62508]: DEBUG nova.compute.provider_tree [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1640.032015] env[62508]: DEBUG oslo_vmware.api [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776226, 'name': CloneVM_Task, 'duration_secs': 1.879789} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.032291] env[62508]: INFO nova.virt.vmwareapi.vmops [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Created linked-clone VM from snapshot [ 1640.036389] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec59b560-d66c-4b6b-b432-b14526eab4c2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.040605] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Uploading image fd68febd-8074-46a2-bb28-8644e177281d {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1640.052958] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Destroying the VM {{(pid=62508) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1640.053242] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-027c153c-db1d-404e-bf8d-8e80e4fd34e8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.061388] env[62508]: DEBUG oslo_vmware.api [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1640.061388] env[62508]: value = "task-1776232" [ 1640.061388] env[62508]: _type = "Task" [ 1640.061388] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.070384] env[62508]: DEBUG oslo_vmware.api [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776232, 'name': Destroy_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.169042] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1640.169466] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquired lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1640.169658] env[62508]: DEBUG nova.network.neutron [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Forcefully refreshing network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1640.239291] env[62508]: DEBUG oslo_vmware.api [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776230, 'name': ReconfigVM_Task, 'duration_secs': 0.197971} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.240050] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Reconfigured VM instance instance-0000004b to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1640.242948] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e777e62-3b5f-473e-ab5f-1b9bde2804f2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.273277] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 2e32ca83-8506-4588-bd33-4eadb7d2d30a/2e32ca83-8506-4588-bd33-4eadb7d2d30a.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1640.273784] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8465240-8f26-4962-aada-b3d4f02143cd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.308676] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776231, 'name': Rename_Task, 'duration_secs': 0.170066} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.309317] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1640.310416] env[62508]: DEBUG oslo_vmware.api [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1640.310416] env[62508]: value = "task-1776233" [ 1640.310416] env[62508]: _type = "Task" [ 1640.310416] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.310839] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1914c52d-4c0f-4340-b402-20f5919a0071 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.322973] env[62508]: DEBUG oslo_vmware.api [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776233, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.325118] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1640.325118] env[62508]: value = "task-1776234" [ 1640.325118] env[62508]: _type = "Task" [ 1640.325118] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.335075] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776234, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.491603] env[62508]: DEBUG nova.scheduler.client.report [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1640.575569] env[62508]: DEBUG oslo_vmware.api [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776232, 'name': Destroy_Task, 'duration_secs': 0.470152} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.575930] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Destroyed the VM [ 1640.576208] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Deleting Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1640.576471] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-95675593-958a-4996-992d-68efc8f59c15 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.583366] env[62508]: DEBUG oslo_vmware.api [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1640.583366] env[62508]: value = "task-1776235" [ 1640.583366] env[62508]: _type = "Task" [ 1640.583366] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.592527] env[62508]: DEBUG oslo_vmware.api [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776235, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.803017] env[62508]: DEBUG nova.compute.manager [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1640.804136] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923c0820-1730-49a3-9361-a64d91e4d912 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.846337] env[62508]: DEBUG oslo_vmware.api [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776233, 'name': ReconfigVM_Task, 'duration_secs': 0.410371} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.851532] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 2e32ca83-8506-4588-bd33-4eadb7d2d30a/2e32ca83-8506-4588-bd33-4eadb7d2d30a.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1640.851862] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Updating instance '2e32ca83-8506-4588-bd33-4eadb7d2d30a' progress to 50 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1640.856417] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776234, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.996901] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.968s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.032271] env[62508]: INFO nova.scheduler.client.report [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Deleted allocations for instance a72fd09e-a3be-486a-a03b-8c25b04d82d0 [ 1641.100201] env[62508]: DEBUG oslo_vmware.api [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776235, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.324634] env[62508]: INFO nova.compute.manager [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] instance snapshotting [ 1641.325542] env[62508]: DEBUG nova.objects.instance [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lazy-loading 'flavor' on Instance uuid e478855d-e9c7-4abc-8e22-a4b2eb0c7310 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1641.348299] env[62508]: DEBUG oslo_vmware.api [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776234, 'name': PowerOnVM_Task, 'duration_secs': 0.540204} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.348917] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1641.349509] env[62508]: INFO nova.compute.manager [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Took 10.96 seconds to spawn the instance on the hypervisor. [ 1641.349979] env[62508]: DEBUG nova.compute.manager [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1641.351079] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f1a9e86-0e25-490f-9637-ba77ca030ae2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.365694] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-621657fe-e270-43d8-8d54-c46eca4bc2f2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.405355] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73b6c3b-0ffc-44b9-a258-1e6541bbedf3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.426565] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Updating instance '2e32ca83-8506-4588-bd33-4eadb7d2d30a' progress to 67 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1641.544129] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c29272d5-c878-4e22-a393-b9eaba9e8c3c tempest-ServerShowV247Test-1954042013 tempest-ServerShowV247Test-1954042013-project-member] Lock "a72fd09e-a3be-486a-a03b-8c25b04d82d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.698s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.594660] env[62508]: DEBUG oslo_vmware.api [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776235, 'name': RemoveSnapshot_Task, 'duration_secs': 0.886842} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.595070] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Deleted Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1641.805022] env[62508]: DEBUG nova.network.neutron [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating instance_info_cache with network_info: [{"id": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "address": "fa:16:3e:f8:bf:1b", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9e88907-91", "ovs_interfaceid": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1641.839025] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5617b400-0dcd-46d9-92da-f9825764b7ec {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.632649] env[62508]: WARNING nova.compute.manager [None req-6eea3367-68e7-4703-b71e-35c1f3bf19ac tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Image not found during snapshot: nova.exception.ImageNotFound: Image fd68febd-8074-46a2-bb28-8644e177281d could not be found. 
[ 1642.632649] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Releasing lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1642.632649] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updated the network info_cache for instance {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1642.633033] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1642.634788] env[62508]: INFO nova.compute.manager [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Took 16.76 seconds to build instance. [ 1642.635092] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1642.650758] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1642.653814] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1642.653814] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-867f46d8-b4fb-47f6-9447-999618551a7e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.656217] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1642.656682] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1642.657048] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1642.657400] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1642.697022] env[62508]: DEBUG nova.network.neutron [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Port 8834c92d-7c01-4079-9e5c-7fbe2b25d73e binding to destination host cpu-1 is already ACTIVE {{(pid=62508) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1642.991086] env[62508]: DEBUG oslo_concurrency.lockutils [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.991338] env[62508]: DEBUG oslo_concurrency.lockutils [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.136047] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "091a11ef-d6c7-4f04-90a6-273da14ce88b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.136047] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "091a11ef-d6c7-4f04-90a6-273da14ce88b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.136047] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "091a11ef-d6c7-4f04-90a6-273da14ce88b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.136221] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "091a11ef-d6c7-4f04-90a6-273da14ce88b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} 
[ 1643.136382] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "091a11ef-d6c7-4f04-90a6-273da14ce88b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1643.139975] env[62508]: INFO nova.compute.manager [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Terminating instance [ 1643.143417] env[62508]: DEBUG nova.compute.manager [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1643.143417] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1643.143417] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10941eeb-c521-4b52-8942-c28751927208 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.151797] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1643.151797] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-39db942b-6967-49ba-aa74-2ea8ca0019e1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.153219] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b103515f-3ba6-4873-8679-296cc5b88c63 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "9283494f-d8e2-4077-9e4d-57ee4786c3c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.294s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1643.159060] env[62508]: DEBUG oslo_vmware.api [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1643.159060] env[62508]: value = "task-1776236" [ 1643.159060] env[62508]: _type = "Task" [ 1643.159060] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.163177] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.163372] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.163532] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1643.163677] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1643.164494] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d3dc46-7522-4e4a-b5ec-d2b2c2d3777f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.169685] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Creating Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1643.174651] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d8dc7bd5-e5e8-4858-ad63-4632299124af {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.177078] env[62508]: DEBUG oslo_vmware.api [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776236, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.179035] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7293b1b5-3356-48c6-8e55-8931b888909f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.195466] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1479013c-cc08-4113-8eda-001d1628c228 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.198086] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1643.198086] env[62508]: value = "task-1776237" [ 1643.198086] env[62508]: _type = "Task" [ 1643.198086] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.210498] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183a9cad-5aa6-4f8a-b4b9-fa32dde571ea {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.220010] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776237, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.250958] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178145MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1643.251165] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.251393] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.494958] env[62508]: DEBUG nova.compute.manager [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1643.606407] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "24091abb-f71f-4528-8fc5-b97725cf079e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.606681] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "24091abb-f71f-4528-8fc5-b97725cf079e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.659148] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "e875f30e-2c25-46a4-8c74-36f08e7eb982" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.659488] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "e875f30e-2c25-46a4-8c74-36f08e7eb982" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.670967] env[62508]: DEBUG oslo_vmware.api [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776236, 'name': PowerOffVM_Task, 'duration_secs': 0.327922} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.671796] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1643.671942] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1643.672214] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7efe9c6c-341d-4ce7-921d-217c6f107e9a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.707525] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776237, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.725784] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "2e32ca83-8506-4588-bd33-4eadb7d2d30a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.726159] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "2e32ca83-8506-4588-bd33-4eadb7d2d30a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.726465] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "2e32ca83-8506-4588-bd33-4eadb7d2d30a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1644.016489] env[62508]: DEBUG oslo_concurrency.lockutils [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.109881] env[62508]: DEBUG nova.compute.manager [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1644.166029] env[62508]: DEBUG nova.compute.manager [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1644.210651] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776237, 'name': CreateSnapshot_Task, 'duration_secs': 0.779712} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.210971] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Created Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1644.211745] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-528de3b3-8a2c-4583-8cb0-c39522a95195 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.266623] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Applying migration context for instance 2e32ca83-8506-4588-bd33-4eadb7d2d30a as it has an incoming, in-progress migration 8172f41b-4bbf-440a-a697-df783272e5c0. Migration status is post-migrating {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1644.268904] env[62508]: INFO nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Updating resource usage from migration 8172f41b-4bbf-440a-a697-df783272e5c0 [ 1644.288597] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance de69dbf0-86f1-4b05-a9db-8b9afaabe49c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1644.288752] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 95a289ac-3178-45ea-80d2-905b9af54f3c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1644.288875] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance e478855d-e9c7-4abc-8e22-a4b2eb0c7310 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1644.288994] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1644.289152] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance a10a4217-ae46-4f00-9ba1-cdf74f44ec7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1644.289275] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance e07ab22e-bd07-4232-abfe-c0617c0b9813 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1644.289386] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 6afa4e73-64b4-4b10-b598-433f0c22ecb3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1644.289502] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance f307d4d5-e877-4d0a-951c-779c1d2e573b actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1644.289612] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1644.289723] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance a0245a18-638d-4c32-bea2-456408b5e001 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1644.289835] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance a8ce13c4-ea95-4343-8eab-8a0dafbf0e03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1644.289944] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance deee2c81-4d2c-47d3-aae6-ef829d59c644 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1644.290066] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 4d24bacc-48c4-4649-bb29-fcae2cf77782 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1644.290177] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 145306d7-f0e8-46c0-b2ab-1c41c208f976 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1644.290284] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance bee2cc61-b26c-4d2d-a2aa-ec79b8678e32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1644.290389] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance f465712f-f65a-4521-90ab-e9f5c5b6de5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1644.290495] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 091a11ef-d6c7-4f04-90a6-273da14ce88b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1644.290600] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 9283494f-d8e2-4077-9e4d-57ee4786c3c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1644.290706] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Migration 8172f41b-4bbf-440a-a697-df783272e5c0 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1644.290810] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 2e32ca83-8506-4588-bd33-4eadb7d2d30a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1644.630222] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.684664] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.730015] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Creating linked-clone VM from snapshot {{(pid=62508) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1644.732862] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4240e1cd-523e-4ab8-9a51-3a97e758b1e9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.741696] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1644.741696] env[62508]: value = "task-1776239" [ 1644.741696] env[62508]: _type = "Task" [ 1644.741696] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.749452] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776239, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.762452] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "refresh_cache-2e32ca83-8506-4588-bd33-4eadb7d2d30a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1644.762595] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "refresh_cache-2e32ca83-8506-4588-bd33-4eadb7d2d30a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1644.762773] env[62508]: DEBUG nova.network.neutron [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1644.793160] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance a058273e-9c68-4d73-9149-ceb60c1c1cda has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1645.253157] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776239, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.296314] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 24091abb-f71f-4528-8fc5-b97725cf079e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1645.490833] env[62508]: DEBUG nova.network.neutron [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Updating instance_info_cache with network_info: [{"id": "8834c92d-7c01-4079-9e5c-7fbe2b25d73e", "address": "fa:16:3e:85:8b:d0", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8834c92d-7c", "ovs_interfaceid": "8834c92d-7c01-4079-9e5c-7fbe2b25d73e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1645.753578] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776239, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.801547] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance e875f30e-2c25-46a4-8c74-36f08e7eb982 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1645.801877] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 20 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1645.802097] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4416MB phys_disk=200GB used_disk=20GB total_vcpus=48 used_vcpus=20 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1645.993337] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "refresh_cache-2e32ca83-8506-4588-bd33-4eadb7d2d30a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1646.098037] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19885f53-0769-4cd0-af67-d31076ef8511 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.105909] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b86052-998a-43be-ba01-900c481e1dcc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.137699] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08435e81-0d0b-4c83-a58b-7c2205da7ea3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.145135] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed176ac4-32a9-4351-9afc-20a2a06e07ef {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.157891] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1646.253259] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776239, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.519057] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f1484a-f866-4051-9333-acc47496a313 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.539771] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac49bc0-1419-4c8a-9396-bc5bfd6a9e6d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.546543] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Updating instance '2e32ca83-8506-4588-bd33-4eadb7d2d30a' progress to 83 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1646.680023] env[62508]: ERROR nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [req-d536ba30-5488-4818-b992-a4165de5b845] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d536ba30-5488-4818-b992-a4165de5b845"}]} [ 1646.695856] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1646.709075] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1646.709254] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1646.719601] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1646.736396] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1646.752167] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776239, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.971067] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58cb1dd5-333a-410c-8225-404ab504c64d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.978442] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f48931e0-62b7-4575-abc5-79ee39ea10c1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.007357] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b5f730-2974-40f2-ad85-0fda1b14227d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.014035] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a3d6a76-d398-4669-a57e-33e411187f9a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.027823] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1647.052035] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1647.052517] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34ff4c51-7c55-4e75-9dc8-e322e78bd6c5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1647.059014] env[62508]: DEBUG oslo_vmware.api [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1647.059014] env[62508]: value = "task-1776240" [ 1647.059014] env[62508]: _type = "Task" [ 1647.059014] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.066674] env[62508]: DEBUG oslo_vmware.api [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776240, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.253190] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776239, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.559134] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 117 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1647.559386] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 117 to 118 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1647.559539] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1647.573076] env[62508]: DEBUG oslo_vmware.api [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776240, 'name': PowerOnVM_Task} progress is 92%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.754409] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776239, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.067792] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1648.068165] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.817s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1648.068364] env[62508]: DEBUG oslo_concurrency.lockutils [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.052s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1648.070460] env[62508]: INFO nova.compute.claims [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1648.079467] env[62508]: DEBUG oslo_vmware.api [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776240, 'name': PowerOnVM_Task, 'duration_secs': 0.763449} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.079607] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1648.079768] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-eea32a09-28ae-4c5d-9f29-3dcf8bd38217 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Updating instance '2e32ca83-8506-4588-bd33-4eadb7d2d30a' progress to 100 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1648.254955] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776239, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.755900] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776239, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.258064] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776239, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.433831] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318d7cdb-b66a-4de6-98f1-5cd55e429969 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.442951] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ea0e8d-c63c-4585-bbd1-60161fed0014 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.472404] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfaf777f-5aea-490d-9807-6cc8f54ca273 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.479820] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f7e4e9-f528-4103-ae5c-45d83cdec9d2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.492700] env[62508]: DEBUG nova.compute.provider_tree [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1649.638340] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1649.638605] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1649.638786] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Deleting the datastore file [datastore1] 091a11ef-d6c7-4f04-90a6-273da14ce88b {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1649.639067] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f3844c08-1cae-402c-84aa-d556518d6c33 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.648299] env[62508]: DEBUG oslo_vmware.api [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for the task: (returnval){ [ 1649.648299] env[62508]: value = "task-1776241" [ 
1649.648299] env[62508]: _type = "Task" [ 1649.648299] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.657347] env[62508]: DEBUG oslo_vmware.api [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776241, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.760454] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776239, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.999327] env[62508]: DEBUG nova.scheduler.client.report [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1650.023629] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ece412cc-3040-41d7-89a7-8696b92284ff tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "2e32ca83-8506-4588-bd33-4eadb7d2d30a" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1650.023629] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ece412cc-3040-41d7-89a7-8696b92284ff tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "2e32ca83-8506-4588-bd33-4eadb7d2d30a" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1650.023629] env[62508]: DEBUG nova.compute.manager [None req-ece412cc-3040-41d7-89a7-8696b92284ff tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Going to confirm migration 4 {{(pid=62508) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1650.163022] env[62508]: DEBUG oslo_vmware.api [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Task: {'id': task-1776241, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.232081} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.163022] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1650.163022] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1650.163022] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1650.163022] env[62508]: INFO nova.compute.manager [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Took 7.02 seconds to destroy the instance on the hypervisor. [ 1650.163022] env[62508]: DEBUG oslo.service.loopingcall [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1650.163022] env[62508]: DEBUG nova.compute.manager [-] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1650.163022] env[62508]: DEBUG nova.network.neutron [-] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1650.258962] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776239, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.417462] env[62508]: DEBUG nova.compute.manager [req-1622d04c-4875-4edd-8c2e-8d11020fa32e req-2558db48-22a5-4d57-a0bf-73efd2d035a5 service nova] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Received event network-vif-deleted-72c0e2c3-36d1-4cd8-91e2-487be99a6348 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1650.417462] env[62508]: INFO nova.compute.manager [req-1622d04c-4875-4edd-8c2e-8d11020fa32e req-2558db48-22a5-4d57-a0bf-73efd2d035a5 service nova] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Neutron deleted interface 72c0e2c3-36d1-4cd8-91e2-487be99a6348; detaching it from the instance and deleting it from the info cache [ 1650.417462] env[62508]: DEBUG nova.network.neutron [req-1622d04c-4875-4edd-8c2e-8d11020fa32e req-2558db48-22a5-4d57-a0bf-73efd2d035a5 service nova] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1650.514796] env[62508]: DEBUG oslo_concurrency.lockutils [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.444s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1650.514796] env[62508]: DEBUG nova.compute.manager [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1650.516805] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.887s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1650.519104] env[62508]: INFO nova.compute.claims [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1650.627388] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ece412cc-3040-41d7-89a7-8696b92284ff tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "refresh_cache-2e32ca83-8506-4588-bd33-4eadb7d2d30a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1650.627485] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ece412cc-3040-41d7-89a7-8696b92284ff tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "refresh_cache-2e32ca83-8506-4588-bd33-4eadb7d2d30a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1650.627829] env[62508]: DEBUG nova.network.neutron [None req-ece412cc-3040-41d7-89a7-8696b92284ff tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1650.627882] env[62508]: DEBUG nova.objects.instance [None req-ece412cc-3040-41d7-89a7-8696b92284ff tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lazy-loading 'info_cache' on Instance uuid 2e32ca83-8506-4588-bd33-4eadb7d2d30a {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1650.759633] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776239, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.891800] env[62508]: DEBUG nova.network.neutron [-] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1650.919672] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-333d223b-39da-43bd-884d-88b7069575a7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.930034] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd99bc29-accb-4762-8172-62fe52d9480c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.967078] env[62508]: DEBUG nova.compute.manager [req-1622d04c-4875-4edd-8c2e-8d11020fa32e req-2558db48-22a5-4d57-a0bf-73efd2d035a5 service nova] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Detach interface failed, port_id=72c0e2c3-36d1-4cd8-91e2-487be99a6348, reason: Instance 091a11ef-d6c7-4f04-90a6-273da14ce88b could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1651.027793] env[62508]: DEBUG nova.compute.utils [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1651.029103] env[62508]: DEBUG nova.compute.manager [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1651.029277] env[62508]: DEBUG nova.network.neutron [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1651.068459] env[62508]: DEBUG nova.policy [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c1793957cc840d58a1b6f1f9b38b96b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b46df14344794f29a8b0c00408d18159', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1651.261000] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776239, 'name': CloneVM_Task, 'duration_secs': 6.318016} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.261674] env[62508]: INFO nova.virt.vmwareapi.vmops [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Created linked-clone VM from snapshot [ 1651.262536] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4470fd-f80b-4715-b387-7a25cd016f63 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.270695] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Uploading image 3590bb13-f252-467d-906d-58e26d1c5029 {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1651.295194] env[62508]: DEBUG oslo_vmware.rw_handles [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1651.295194] env[62508]: value = "vm-368757" [ 1651.295194] env[62508]: _type = "VirtualMachine" [ 1651.295194] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1651.295457] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e10e9876-9884-4980-b120-3442a53d8e4c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.303307] env[62508]: DEBUG oslo_vmware.rw_handles [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lease: (returnval){ [ 1651.303307] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c7bfc0-ffcb-4341-94a9-c38df35ab07d" [ 1651.303307] env[62508]: _type = "HttpNfcLease" [ 1651.303307] env[62508]: } obtained for exporting VM: (result){ [ 1651.303307] env[62508]: value = "vm-368757" [ 1651.303307] env[62508]: _type = "VirtualMachine" [ 1651.303307] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1651.303307] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the lease: (returnval){ [ 1651.303307] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c7bfc0-ffcb-4341-94a9-c38df35ab07d" [ 1651.303307] env[62508]: _type = "HttpNfcLease" [ 1651.303307] env[62508]: } to be ready. {{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1651.309732] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1651.309732] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c7bfc0-ffcb-4341-94a9-c38df35ab07d" [ 1651.309732] env[62508]: _type = "HttpNfcLease" [ 1651.309732] env[62508]: } is initializing. 
{{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1651.333181] env[62508]: DEBUG nova.network.neutron [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Successfully created port: be5b5d9b-1f22-455a-b4f6-128f17030129 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1651.394748] env[62508]: INFO nova.compute.manager [-] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Took 1.23 seconds to deallocate network for instance. [ 1651.533024] env[62508]: DEBUG nova.compute.manager [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1651.811477] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1651.811477] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c7bfc0-ffcb-4341-94a9-c38df35ab07d" [ 1651.811477] env[62508]: _type = "HttpNfcLease" [ 1651.811477] env[62508]: } is ready. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1651.811675] env[62508]: DEBUG oslo_vmware.rw_handles [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1651.811675] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c7bfc0-ffcb-4341-94a9-c38df35ab07d" [ 1651.811675] env[62508]: _type = "HttpNfcLease" [ 1651.811675] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1651.812431] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a17cdad-3c1d-469d-a610-8d012ed5eb3c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.822201] env[62508]: DEBUG oslo_vmware.rw_handles [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52764eff-a3bf-f9e9-83b0-0ee0fc6acc52/disk-0.vmdk from lease info. {{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1651.822424] env[62508]: DEBUG oslo_vmware.rw_handles [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52764eff-a3bf-f9e9-83b0-0ee0fc6acc52/disk-0.vmdk for reading. 
{{(pid=62508) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1651.901188] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.919488] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e4d5c07e-231e-4f8f-bc9e-379520f9b4ef {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.938818] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9cc7e2-fb83-4f83-b8c0-1c62a04a7421 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.954326] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9947b0c3-4d74-4484-b3aa-f8b8320ac14e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.969261] env[62508]: DEBUG nova.network.neutron [None req-ece412cc-3040-41d7-89a7-8696b92284ff tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Updating instance_info_cache with network_info: [{"id": "8834c92d-7c01-4079-9e5c-7fbe2b25d73e", "address": "fa:16:3e:85:8b:d0", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8834c92d-7c", "ovs_interfaceid": "8834c92d-7c01-4079-9e5c-7fbe2b25d73e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1652.003782] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ece412cc-3040-41d7-89a7-8696b92284ff tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "refresh_cache-2e32ca83-8506-4588-bd33-4eadb7d2d30a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1652.004033] env[62508]: DEBUG nova.objects.instance [None req-ece412cc-3040-41d7-89a7-8696b92284ff tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lazy-loading 'migration_context' on Instance uuid 
2e32ca83-8506-4588-bd33-4eadb7d2d30a {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1652.009018] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-138fe55f-27c9-4f0c-a75f-f0d7431bfd17 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.014376] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90136a68-a2cd-457c-8039-6e7456d51f80 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.028474] env[62508]: DEBUG nova.compute.provider_tree [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1652.510041] env[62508]: DEBUG nova.objects.base [None req-ece412cc-3040-41d7-89a7-8696b92284ff tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Object Instance<2e32ca83-8506-4588-bd33-4eadb7d2d30a> lazy-loaded attributes: info_cache,migration_context {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1652.511227] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750352d0-f5cb-4a99-9adb-0cbdeaa18778 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.531917] env[62508]: DEBUG nova.scheduler.client.report [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1652.536914] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf1bc3fd-d448-4e40-99e9-f1d902620d5d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.543674] env[62508]: DEBUG nova.compute.manager [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1652.545870] env[62508]: DEBUG oslo_vmware.api [None req-ece412cc-3040-41d7-89a7-8696b92284ff tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1652.545870] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d6853a-7cd1-31ee-6c03-7a08e3b94c21" [ 1652.545870] env[62508]: _type = "Task" [ 1652.545870] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.556197] env[62508]: DEBUG oslo_vmware.api [None req-ece412cc-3040-41d7-89a7-8696b92284ff tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d6853a-7cd1-31ee-6c03-7a08e3b94c21, 'name': SearchDatastore_Task, 'duration_secs': 0.007372} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.556646] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ece412cc-3040-41d7-89a7-8696b92284ff tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.569350] env[62508]: DEBUG nova.virt.hardware [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1652.569657] env[62508]: DEBUG nova.virt.hardware [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1652.569826] env[62508]: DEBUG nova.virt.hardware [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1652.570101] env[62508]: DEBUG nova.virt.hardware [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1652.570259] env[62508]: DEBUG nova.virt.hardware [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1652.570413] env[62508]: DEBUG nova.virt.hardware [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1652.570693] env[62508]: DEBUG nova.virt.hardware [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1652.570868] env[62508]: DEBUG nova.virt.hardware [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1652.571117] env[62508]: DEBUG nova.virt.hardware [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1652.571303] env[62508]: DEBUG nova.virt.hardware [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1652.571490] env[62508]: DEBUG nova.virt.hardware [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1652.572670] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966a4afb-983c-400e-b9b5-c7414d8324e3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.580883] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63465551-2e4e-42af-a176-d4ae58c74093 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.785752] env[62508]: DEBUG nova.compute.manager [req-65c4d308-8b51-4ade-b84a-74889716c3fb req-ec517d22-52d5-4150-9b81-f007fb953371 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Received event network-vif-plugged-be5b5d9b-1f22-455a-b4f6-128f17030129 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1652.786116] env[62508]: DEBUG oslo_concurrency.lockutils [req-65c4d308-8b51-4ade-b84a-74889716c3fb req-ec517d22-52d5-4150-9b81-f007fb953371 service nova] Acquiring lock "a058273e-9c68-4d73-9149-ceb60c1c1cda-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.786708] env[62508]: DEBUG oslo_concurrency.lockutils [req-65c4d308-8b51-4ade-b84a-74889716c3fb req-ec517d22-52d5-4150-9b81-f007fb953371 service nova] Lock "a058273e-9c68-4d73-9149-ceb60c1c1cda-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s 
{{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1652.786708] env[62508]: DEBUG oslo_concurrency.lockutils [req-65c4d308-8b51-4ade-b84a-74889716c3fb req-ec517d22-52d5-4150-9b81-f007fb953371 service nova] Lock "a058273e-9c68-4d73-9149-ceb60c1c1cda-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.786708] env[62508]: DEBUG nova.compute.manager [req-65c4d308-8b51-4ade-b84a-74889716c3fb req-ec517d22-52d5-4150-9b81-f007fb953371 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] No waiting events found dispatching network-vif-plugged-be5b5d9b-1f22-455a-b4f6-128f17030129 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1652.786803] env[62508]: WARNING nova.compute.manager [req-65c4d308-8b51-4ade-b84a-74889716c3fb req-ec517d22-52d5-4150-9b81-f007fb953371 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Received unexpected event network-vif-plugged-be5b5d9b-1f22-455a-b4f6-128f17030129 for instance with vm_state building and task_state spawning. [ 1652.889632] env[62508]: DEBUG nova.network.neutron [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Successfully updated port: be5b5d9b-1f22-455a-b4f6-128f17030129 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1653.037970] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.521s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.041011] env[62508]: DEBUG nova.compute.manager [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1653.043320] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.359s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.045403] env[62508]: INFO nova.compute.claims [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1653.391707] env[62508]: DEBUG oslo_concurrency.lockutils [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1653.392109] env[62508]: DEBUG oslo_concurrency.lockutils [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1653.392410] env[62508]: DEBUG nova.network.neutron [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1653.550341] env[62508]: DEBUG nova.compute.utils [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1653.554109] env[62508]: DEBUG nova.compute.manager [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1653.554542] env[62508]: DEBUG nova.network.neutron [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1653.596613] env[62508]: DEBUG nova.policy [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e41dfb589d24bb1ac97ebb67ea59f9a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e682eb7bbd240afb2f6581c7478b99c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1653.881909] env[62508]: DEBUG nova.network.neutron [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Successfully created port: 0eda6157-2402-4297-8eb5-07a5b94eba56 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1653.949394] env[62508]: DEBUG nova.network.neutron [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1654.058109] env[62508]: DEBUG nova.compute.manager [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1654.179802] env[62508]: DEBUG nova.network.neutron [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Updating instance_info_cache with network_info: [{"id": "be5b5d9b-1f22-455a-b4f6-128f17030129", "address": "fa:16:3e:8e:37:9c", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe5b5d9b-1f", "ovs_interfaceid": "be5b5d9b-1f22-455a-b4f6-128f17030129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1654.421681] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64abf8bb-7220-41b4-b914-73ce96ffc7d2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.430246] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cdc9a29-be60-499f-a0a3-8fae98b01ebd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.462925] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b21db3-d165-41aa-bc1b-310261912d8f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.471447] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9855e96-6264-49de-b0ad-25dbc642b4c3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.487723] env[62508]: DEBUG nova.compute.provider_tree [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1654.682475] env[62508]: DEBUG oslo_concurrency.lockutils [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1654.682937] env[62508]: DEBUG 
nova.compute.manager [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Instance network_info: |[{"id": "be5b5d9b-1f22-455a-b4f6-128f17030129", "address": "fa:16:3e:8e:37:9c", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe5b5d9b-1f", "ovs_interfaceid": "be5b5d9b-1f22-455a-b4f6-128f17030129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1654.683405] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:37:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dba18786-598d-4e06-96db-b3dc1717530f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'be5b5d9b-1f22-455a-b4f6-128f17030129', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1654.691101] env[62508]: DEBUG oslo.service.loopingcall [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1654.691341] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1654.691590] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6133e2d7-eb4f-4f96-a457-bb584844e49a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.713083] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1654.713083] env[62508]: value = "task-1776243" [ 1654.713083] env[62508]: _type = "Task" [ 1654.713083] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.721208] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776243, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.812229] env[62508]: DEBUG nova.compute.manager [req-920d59ae-2de3-4fc4-86b2-dea54969df68 req-ec06e335-960f-4414-aec0-d2ac259cdd53 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Received event network-changed-be5b5d9b-1f22-455a-b4f6-128f17030129 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1654.812497] env[62508]: DEBUG nova.compute.manager [req-920d59ae-2de3-4fc4-86b2-dea54969df68 req-ec06e335-960f-4414-aec0-d2ac259cdd53 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Refreshing instance network info cache due to event network-changed-be5b5d9b-1f22-455a-b4f6-128f17030129. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1654.812652] env[62508]: DEBUG oslo_concurrency.lockutils [req-920d59ae-2de3-4fc4-86b2-dea54969df68 req-ec06e335-960f-4414-aec0-d2ac259cdd53 service nova] Acquiring lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1654.812871] env[62508]: DEBUG oslo_concurrency.lockutils [req-920d59ae-2de3-4fc4-86b2-dea54969df68 req-ec06e335-960f-4414-aec0-d2ac259cdd53 service nova] Acquired lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1654.812960] env[62508]: DEBUG nova.network.neutron [req-920d59ae-2de3-4fc4-86b2-dea54969df68 req-ec06e335-960f-4414-aec0-d2ac259cdd53 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Refreshing network info cache for port be5b5d9b-1f22-455a-b4f6-128f17030129 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1654.872421] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "145306d7-f0e8-46c0-b2ab-1c41c208f976" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1654.872663] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "145306d7-f0e8-46c0-b2ab-1c41c208f976" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1654.991487] env[62508]: DEBUG nova.scheduler.client.report [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1655.070462] env[62508]: 
DEBUG nova.compute.manager [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1655.098251] env[62508]: DEBUG nova.virt.hardware [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1655.098506] env[62508]: DEBUG nova.virt.hardware [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1655.098773] env[62508]: DEBUG nova.virt.hardware [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1655.098944] env[62508]: DEBUG nova.virt.hardware [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1655.099149] env[62508]: DEBUG nova.virt.hardware [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1655.099262] env[62508]: DEBUG nova.virt.hardware [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1655.099498] env[62508]: DEBUG nova.virt.hardware [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1655.099675] env[62508]: DEBUG nova.virt.hardware [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 
tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1655.099846] env[62508]: DEBUG nova.virt.hardware [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1655.100029] env[62508]: DEBUG nova.virt.hardware [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1655.100218] env[62508]: DEBUG nova.virt.hardware [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1655.101119] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e603cc-4361-44e2-be4b-c000baf84058 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.109828] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7154c8-e8a8-4467-8085-33b5333b1ea5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.223638] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776243, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.376180] env[62508]: DEBUG nova.compute.utils [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1655.496521] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.453s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1655.497143] env[62508]: DEBUG nova.compute.manager [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1655.502299] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.601s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.506017] env[62508]: DEBUG nova.objects.instance [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lazy-loading 'resources' on Instance uuid 091a11ef-d6c7-4f04-90a6-273da14ce88b {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1655.576867] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquiring lock "deee2c81-4d2c-47d3-aae6-ef829d59c644" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.577481] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "deee2c81-4d2c-47d3-aae6-ef829d59c644" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.681764] env[62508]: DEBUG nova.network.neutron [req-920d59ae-2de3-4fc4-86b2-dea54969df68 req-ec06e335-960f-4414-aec0-d2ac259cdd53 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Updated VIF entry in instance network info cache for port be5b5d9b-1f22-455a-b4f6-128f17030129. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1655.682221] env[62508]: DEBUG nova.network.neutron [req-920d59ae-2de3-4fc4-86b2-dea54969df68 req-ec06e335-960f-4414-aec0-d2ac259cdd53 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Updating instance_info_cache with network_info: [{"id": "be5b5d9b-1f22-455a-b4f6-128f17030129", "address": "fa:16:3e:8e:37:9c", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe5b5d9b-1f", "ovs_interfaceid": "be5b5d9b-1f22-455a-b4f6-128f17030129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1655.724252] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776243, 'name': CreateVM_Task, 'duration_secs': 0.685323} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.724601] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1655.725088] env[62508]: DEBUG oslo_concurrency.lockutils [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1655.725264] env[62508]: DEBUG oslo_concurrency.lockutils [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1655.725586] env[62508]: DEBUG oslo_concurrency.lockutils [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1655.725839] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-739492c7-4768-416e-ba6d-359de125ee78 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.730649] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1655.730649] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e7642d-4a6e-6636-f8ca-bb5040e75a86" [ 1655.730649] env[62508]: _type = "Task" [ 1655.730649] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.738211] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e7642d-4a6e-6636-f8ca-bb5040e75a86, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.739474] env[62508]: DEBUG nova.network.neutron [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Successfully updated port: 0eda6157-2402-4297-8eb5-07a5b94eba56 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1655.879255] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "145306d7-f0e8-46c0-b2ab-1c41c208f976" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.005242] env[62508]: DEBUG nova.compute.utils [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1656.010537] env[62508]: DEBUG nova.compute.manager [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1656.011272] env[62508]: DEBUG nova.network.neutron [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1656.050407] env[62508]: DEBUG nova.policy [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf74c20248784c3ca734e528856f21f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce3b480c3c81499599aef114f92775cd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1656.080465] env[62508]: DEBUG nova.compute.utils [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1656.185829] env[62508]: DEBUG oslo_concurrency.lockutils [req-920d59ae-2de3-4fc4-86b2-dea54969df68 req-ec06e335-960f-4414-aec0-d2ac259cdd53 service nova] Releasing lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1656.241685] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "refresh_cache-24091abb-f71f-4528-8fc5-b97725cf079e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1656.241857] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquired lock "refresh_cache-24091abb-f71f-4528-8fc5-b97725cf079e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1656.242053] env[62508]: DEBUG nova.network.neutron [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1656.243314] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e7642d-4a6e-6636-f8ca-bb5040e75a86, 'name': SearchDatastore_Task, 'duration_secs': 0.011559} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.247048] env[62508]: DEBUG oslo_concurrency.lockutils [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1656.247048] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1656.247048] env[62508]: DEBUG oslo_concurrency.lockutils [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1656.247048] env[62508]: DEBUG oslo_concurrency.lockutils [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1656.247476] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1656.247740] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07ad6006-02f0-4b8e-9554-d5ba647631a5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.257970] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1656.258099] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1656.258821] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70bf745f-e693-450e-9088-352c731f9499 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.268462] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1656.268462] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522df6c2-4ba6-2dc5-e396-075d86ceffe8" [ 1656.268462] env[62508]: _type = "Task" [ 1656.268462] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.278614] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522df6c2-4ba6-2dc5-e396-075d86ceffe8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.298746] env[62508]: DEBUG nova.network.neutron [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Successfully created port: 12fd1aa1-4663-49bc-8123-396a0fedaeb8 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1656.312441] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51edbc8-55c6-4d4d-b169-35d8b4d3810a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.320139] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b7219b-6399-414e-a776-82645c70a368 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.354948] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ae649e-d4b7-4238-9562-79c6f1818a3e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.363061] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ba334a-c1ca-4bd9-aa6a-c2db8131fcfb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.376605] env[62508]: DEBUG nova.compute.provider_tree [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1656.515144] env[62508]: DEBUG nova.compute.manager [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1656.582929] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "deee2c81-4d2c-47d3-aae6-ef829d59c644" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.778800] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522df6c2-4ba6-2dc5-e396-075d86ceffe8, 'name': SearchDatastore_Task, 'duration_secs': 0.014464} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.779680] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-942438e5-3be2-47c0-8076-12ffd62bc2cf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.784920] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1656.784920] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52672664-4557-534f-a997-7336c8b4af64" [ 1656.784920] env[62508]: _type = "Task" [ 1656.784920] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.792845] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52672664-4557-534f-a997-7336c8b4af64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.798970] env[62508]: DEBUG nova.network.neutron [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1656.880534] env[62508]: DEBUG nova.scheduler.client.report [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1656.946926] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "145306d7-f0e8-46c0-b2ab-1c41c208f976" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.947263] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "145306d7-f0e8-46c0-b2ab-1c41c208f976" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.947578] env[62508]: INFO nova.compute.manager [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Attaching volume 9420607e-8de8-4e29-a868-732bd5be0158 to /dev/sdb [ 1656.982181] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0b1d7a-2ab4-417c-865c-dc219815eea2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.990784] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbec847c-8dc5-40ba-84d4-68cc77612700 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.005343] env[62508]: DEBUG nova.virt.block_device [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Updating existing volume attachment record: cff1a769-5f52-4447-8284-1ac0f61912c8 {{(pid=62508) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1657.008843] env[62508]: DEBUG nova.network.neutron [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Updating instance_info_cache with network_info: [{"id": "0eda6157-2402-4297-8eb5-07a5b94eba56", "address": "fa:16:3e:b7:4c:0b", "network": {"id": "121c7907-9028-4be7-9d23-48e5c34ec429", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-954413717-network", "subnets": [{"cidr": "192.168.128.0/28", 
"dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e682eb7bbd240afb2f6581c7478b99c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eda6157-24", "ovs_interfaceid": "0eda6157-2402-4297-8eb5-07a5b94eba56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1657.177492] env[62508]: DEBUG nova.compute.manager [req-d30747c9-2e93-4ef3-b517-2619d8081d15 req-9bb3a74a-210a-49aa-a27c-d3de64998827 service nova] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Received event network-vif-plugged-0eda6157-2402-4297-8eb5-07a5b94eba56 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1657.177777] env[62508]: DEBUG oslo_concurrency.lockutils [req-d30747c9-2e93-4ef3-b517-2619d8081d15 req-9bb3a74a-210a-49aa-a27c-d3de64998827 service nova] Acquiring lock "24091abb-f71f-4528-8fc5-b97725cf079e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.177963] env[62508]: DEBUG oslo_concurrency.lockutils [req-d30747c9-2e93-4ef3-b517-2619d8081d15 req-9bb3a74a-210a-49aa-a27c-d3de64998827 service nova] Lock "24091abb-f71f-4528-8fc5-b97725cf079e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1657.178176] env[62508]: DEBUG oslo_concurrency.lockutils [req-d30747c9-2e93-4ef3-b517-2619d8081d15 req-9bb3a74a-210a-49aa-a27c-d3de64998827 service nova] Lock "24091abb-f71f-4528-8fc5-b97725cf079e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1657.178385] env[62508]: DEBUG nova.compute.manager [req-d30747c9-2e93-4ef3-b517-2619d8081d15 req-9bb3a74a-210a-49aa-a27c-d3de64998827 service nova] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] No waiting events found dispatching network-vif-plugged-0eda6157-2402-4297-8eb5-07a5b94eba56 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1657.178565] env[62508]: WARNING nova.compute.manager [req-d30747c9-2e93-4ef3-b517-2619d8081d15 req-9bb3a74a-210a-49aa-a27c-d3de64998827 service nova] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Received unexpected event network-vif-plugged-0eda6157-2402-4297-8eb5-07a5b94eba56 for instance with vm_state building and task_state spawning. 
[ 1657.178735] env[62508]: DEBUG nova.compute.manager [req-d30747c9-2e93-4ef3-b517-2619d8081d15 req-9bb3a74a-210a-49aa-a27c-d3de64998827 service nova] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Received event network-changed-0eda6157-2402-4297-8eb5-07a5b94eba56 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1657.178924] env[62508]: DEBUG nova.compute.manager [req-d30747c9-2e93-4ef3-b517-2619d8081d15 req-9bb3a74a-210a-49aa-a27c-d3de64998827 service nova] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Refreshing instance network info cache due to event network-changed-0eda6157-2402-4297-8eb5-07a5b94eba56. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1657.179114] env[62508]: DEBUG oslo_concurrency.lockutils [req-d30747c9-2e93-4ef3-b517-2619d8081d15 req-9bb3a74a-210a-49aa-a27c-d3de64998827 service nova] Acquiring lock "refresh_cache-24091abb-f71f-4528-8fc5-b97725cf079e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1657.296852] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52672664-4557-534f-a997-7336c8b4af64, 'name': SearchDatastore_Task, 'duration_secs': 0.018325} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.297201] env[62508]: DEBUG oslo_concurrency.lockutils [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1657.297556] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a058273e-9c68-4d73-9149-ceb60c1c1cda/a058273e-9c68-4d73-9149-ceb60c1c1cda.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1657.298454] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-27cfb12d-6434-4072-b292-064764641a24 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.305751] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1657.305751] env[62508]: value = "task-1776247" [ 1657.305751] env[62508]: _type = "Task" [ 1657.305751] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.314696] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776247, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.386141] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.884s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1657.388723] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ece412cc-3040-41d7-89a7-8696b92284ff tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 4.832s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1657.411943] env[62508]: INFO nova.scheduler.client.report [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Deleted allocations for instance 091a11ef-d6c7-4f04-90a6-273da14ce88b [ 1657.511914] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Releasing lock "refresh_cache-24091abb-f71f-4528-8fc5-b97725cf079e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1657.512728] env[62508]: DEBUG nova.compute.manager [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Instance network_info: |[{"id": "0eda6157-2402-4297-8eb5-07a5b94eba56", "address": "fa:16:3e:b7:4c:0b", "network": {"id": "121c7907-9028-4be7-9d23-48e5c34ec429", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-954413717-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e682eb7bbd240afb2f6581c7478b99c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eda6157-24", "ovs_interfaceid": "0eda6157-2402-4297-8eb5-07a5b94eba56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1657.512728] env[62508]: DEBUG oslo_concurrency.lockutils [req-d30747c9-2e93-4ef3-b517-2619d8081d15 req-9bb3a74a-210a-49aa-a27c-d3de64998827 service nova] Acquired lock "refresh_cache-24091abb-f71f-4528-8fc5-b97725cf079e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1657.512989] env[62508]: DEBUG nova.network.neutron [req-d30747c9-2e93-4ef3-b517-2619d8081d15 
req-9bb3a74a-210a-49aa-a27c-d3de64998827 service nova] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Refreshing network info cache for port 0eda6157-2402-4297-8eb5-07a5b94eba56 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1657.514697] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:4c:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0d7a2b2f-3b49-4dc8-9096-af16144b27a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0eda6157-2402-4297-8eb5-07a5b94eba56', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1657.522690] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Creating folder: Project (4e682eb7bbd240afb2f6581c7478b99c). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1657.523296] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a905dbf-478c-4143-969b-545286b8fcb6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.527040] env[62508]: DEBUG nova.compute.manager [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1657.538944] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Created folder: Project (4e682eb7bbd240afb2f6581c7478b99c) in parent group-v368536. [ 1657.539165] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Creating folder: Instances. Parent ref: group-v368761. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1657.539415] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-25048782-e405-4764-8ac1-8a2bd7b54dea {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.548998] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Created folder: Instances in parent group-v368761. [ 1657.549266] env[62508]: DEBUG oslo.service.loopingcall [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1657.549458] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1657.549676] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9015b79d-24c9-4527-9a3c-c61698d8713d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.566461] env[62508]: DEBUG nova.virt.hardware [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1657.566725] env[62508]: DEBUG nova.virt.hardware [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1657.566885] env[62508]: DEBUG nova.virt.hardware [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1657.567081] env[62508]: DEBUG nova.virt.hardware [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1657.567232] env[62508]: DEBUG nova.virt.hardware [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1657.567399] env[62508]: DEBUG nova.virt.hardware [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1657.567620] env[62508]: DEBUG nova.virt.hardware [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1657.567781] env[62508]: DEBUG nova.virt.hardware [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1657.567946] env[62508]: DEBUG nova.virt.hardware [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1657.568125] env[62508]: DEBUG nova.virt.hardware [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1657.568303] env[62508]: DEBUG nova.virt.hardware [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1657.569518] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc97b90-2a48-4079-ab84-4c310d594034 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.577626] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b0f461-5eea-4a05-aff1-2e800256067e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.582578] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1657.582578] env[62508]: value = "task-1776250" [ 1657.582578] env[62508]: _type = "Task" [ 1657.582578] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.600875] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776250, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.654242] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquiring lock "deee2c81-4d2c-47d3-aae6-ef829d59c644" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.654545] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "deee2c81-4d2c-47d3-aae6-ef829d59c644" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1657.654791] env[62508]: INFO nova.compute.manager [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Attaching volume 7a067ab9-712c-402e-b7e2-94111951d17b to /dev/sdb [ 1657.691741] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2bf0011-42fb-4954-bd6f-2b4772e8f38a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.701883] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1412030-f596-49ab-ab4b-e33e317c66c5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.717529] env[62508]: DEBUG nova.virt.block_device [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Updating existing volume attachment record: 7611ab3d-c30c-48d2-8a2d-b5a5ff6e37d1 {{(pid=62508) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1657.820680] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776247, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.921828] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e8b3970a-c042-4160-8039-4361f4836127 tempest-ImagesTestJSON-1000061352 tempest-ImagesTestJSON-1000061352-project-member] Lock "091a11ef-d6c7-4f04-90a6-273da14ce88b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.786s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1657.940550] env[62508]: DEBUG nova.network.neutron [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Successfully updated port: 12fd1aa1-4663-49bc-8123-396a0fedaeb8 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1658.095574] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776250, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.321889] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776247, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.821722} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.322196] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a058273e-9c68-4d73-9149-ceb60c1c1cda/a058273e-9c68-4d73-9149-ceb60c1c1cda.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1658.322423] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1658.322678] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e13502b-3961-4f1b-acf2-1966d8718423 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.330354] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1658.330354] env[62508]: value = "task-1776254" [ 1658.330354] env[62508]: _type = "Task" [ 1658.330354] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.338507] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776254, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.358445] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac761b4-82af-4d2a-99d8-fbf20f868de6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.366986] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-057fabcf-a0df-4870-8e01-e3a10ace5368 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.401371] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4730a45-54fd-4dbc-9ac8-8a48d129420a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.410224] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0ba272-e39a-4d2f-9cb3-2a398baa6985 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.425752] env[62508]: DEBUG nova.compute.provider_tree [None req-ece412cc-3040-41d7-89a7-8696b92284ff tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1658.444947] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "refresh_cache-e875f30e-2c25-46a4-8c74-36f08e7eb982" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1658.444947] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired lock "refresh_cache-e875f30e-2c25-46a4-8c74-36f08e7eb982" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1658.444947] env[62508]: DEBUG nova.network.neutron [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1658.569380] env[62508]: DEBUG nova.network.neutron [req-d30747c9-2e93-4ef3-b517-2619d8081d15 req-9bb3a74a-210a-49aa-a27c-d3de64998827 service nova] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Updated VIF entry in instance network info cache for port 0eda6157-2402-4297-8eb5-07a5b94eba56. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1658.569814] env[62508]: DEBUG nova.network.neutron [req-d30747c9-2e93-4ef3-b517-2619d8081d15 req-9bb3a74a-210a-49aa-a27c-d3de64998827 service nova] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Updating instance_info_cache with network_info: [{"id": "0eda6157-2402-4297-8eb5-07a5b94eba56", "address": "fa:16:3e:b7:4c:0b", "network": {"id": "121c7907-9028-4be7-9d23-48e5c34ec429", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-954413717-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e682eb7bbd240afb2f6581c7478b99c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eda6157-24", "ovs_interfaceid": "0eda6157-2402-4297-8eb5-07a5b94eba56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1658.595130] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776250, 'name': CreateVM_Task, 'duration_secs': 0.615354} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.595366] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1658.595976] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1658.596164] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1658.596527] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1658.596782] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6d9f744-4164-4818-89e6-4440e8fae147 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.601636] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1658.601636] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5287537d-59ee-236c-0ef0-00d91cf47cb6" [ 1658.601636] env[62508]: _type = "Task" [ 1658.601636] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.609950] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5287537d-59ee-236c-0ef0-00d91cf47cb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.840259] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776254, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087271} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.840578] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1658.844214] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bec5151-47ad-4fe9-94ac-b253977b9b03 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.864901] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] a058273e-9c68-4d73-9149-ceb60c1c1cda/a058273e-9c68-4d73-9149-ceb60c1c1cda.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1658.865267] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7bacf31-2aaf-488f-b005-c02d81b3b7a9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.885616] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1658.885616] env[62508]: value = "task-1776255" [ 1658.885616] env[62508]: _type = "Task" [ 1658.885616] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.894163] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776255, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.932545] env[62508]: DEBUG nova.scheduler.client.report [None req-ece412cc-3040-41d7-89a7-8696b92284ff tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1658.989022] env[62508]: DEBUG nova.network.neutron [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1659.074182] env[62508]: DEBUG oslo_concurrency.lockutils [req-d30747c9-2e93-4ef3-b517-2619d8081d15 req-9bb3a74a-210a-49aa-a27c-d3de64998827 service nova] Releasing lock "refresh_cache-24091abb-f71f-4528-8fc5-b97725cf079e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1659.122408] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5287537d-59ee-236c-0ef0-00d91cf47cb6, 'name': SearchDatastore_Task, 'duration_secs': 0.009554} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.122408] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1659.122408] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1659.122408] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1659.122408] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1659.122408] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1659.122408] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b2acd12-9e01-403d-8df5-afb06fc40324 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.132137] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1659.132380] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1659.133114] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffb9748a-2fa4-4ba9-8682-03a4a162cf19 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.138713] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1659.138713] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5280d76b-db4d-f02d-6fb2-34c78781dfb6" [ 1659.138713] env[62508]: _type = "Task" [ 1659.138713] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.148268] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5280d76b-db4d-f02d-6fb2-34c78781dfb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.152340] env[62508]: DEBUG nova.network.neutron [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Updating instance_info_cache with network_info: [{"id": "12fd1aa1-4663-49bc-8123-396a0fedaeb8", "address": "fa:16:3e:b3:d5:8c", "network": {"id": "29c40f34-e678-48f1-94f4-d128bc6dfe71", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1416755499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce3b480c3c81499599aef114f92775cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12fd1aa1-46", "ovs_interfaceid": "12fd1aa1-4663-49bc-8123-396a0fedaeb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1659.259907] env[62508]: DEBUG nova.compute.manager [req-4c04e540-5a9a-4c81-81cf-775d4d165373 req-f2f37a90-5037-4a58-9e3e-83989be7cb77 service nova] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Received event network-vif-plugged-12fd1aa1-4663-49bc-8123-396a0fedaeb8 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1659.261659] env[62508]: DEBUG oslo_concurrency.lockutils [req-4c04e540-5a9a-4c81-81cf-775d4d165373 req-f2f37a90-5037-4a58-9e3e-83989be7cb77 service nova] Acquiring lock "e875f30e-2c25-46a4-8c74-36f08e7eb982-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" 
{{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1659.261889] env[62508]: DEBUG oslo_concurrency.lockutils [req-4c04e540-5a9a-4c81-81cf-775d4d165373 req-f2f37a90-5037-4a58-9e3e-83989be7cb77 service nova] Lock "e875f30e-2c25-46a4-8c74-36f08e7eb982-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.262203] env[62508]: DEBUG oslo_concurrency.lockutils [req-4c04e540-5a9a-4c81-81cf-775d4d165373 req-f2f37a90-5037-4a58-9e3e-83989be7cb77 service nova] Lock "e875f30e-2c25-46a4-8c74-36f08e7eb982-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.262273] env[62508]: DEBUG nova.compute.manager [req-4c04e540-5a9a-4c81-81cf-775d4d165373 req-f2f37a90-5037-4a58-9e3e-83989be7cb77 service nova] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] No waiting events found dispatching network-vif-plugged-12fd1aa1-4663-49bc-8123-396a0fedaeb8 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1659.262432] env[62508]: WARNING nova.compute.manager [req-4c04e540-5a9a-4c81-81cf-775d4d165373 req-f2f37a90-5037-4a58-9e3e-83989be7cb77 service nova] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Received unexpected event network-vif-plugged-12fd1aa1-4663-49bc-8123-396a0fedaeb8 for instance with vm_state building and task_state spawning. [ 1659.262596] env[62508]: DEBUG nova.compute.manager [req-4c04e540-5a9a-4c81-81cf-775d4d165373 req-f2f37a90-5037-4a58-9e3e-83989be7cb77 service nova] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Received event network-changed-12fd1aa1-4663-49bc-8123-396a0fedaeb8 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1659.262748] env[62508]: DEBUG nova.compute.manager [req-4c04e540-5a9a-4c81-81cf-775d4d165373 req-f2f37a90-5037-4a58-9e3e-83989be7cb77 service nova] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Refreshing instance network info cache due to event network-changed-12fd1aa1-4663-49bc-8123-396a0fedaeb8. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1659.262912] env[62508]: DEBUG oslo_concurrency.lockutils [req-4c04e540-5a9a-4c81-81cf-775d4d165373 req-f2f37a90-5037-4a58-9e3e-83989be7cb77 service nova] Acquiring lock "refresh_cache-e875f30e-2c25-46a4-8c74-36f08e7eb982" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1659.396384] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776255, 'name': ReconfigVM_Task, 'duration_secs': 0.448725} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.396805] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Reconfigured VM instance instance-0000004f to attach disk [datastore1] a058273e-9c68-4d73-9149-ceb60c1c1cda/a058273e-9c68-4d73-9149-ceb60c1c1cda.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1659.397468] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e84e0448-fec5-436b-b7ba-47a7d57de8a6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.405393] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1659.405393] env[62508]: value = "task-1776257" [ 1659.405393] env[62508]: _type = "Task" [ 1659.405393] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.415049] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776257, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.650308] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5280d76b-db4d-f02d-6fb2-34c78781dfb6, 'name': SearchDatastore_Task, 'duration_secs': 0.012056} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.651117] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16e9dfb7-4611-4dda-8c52-cc175190c644 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.655068] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Releasing lock "refresh_cache-e875f30e-2c25-46a4-8c74-36f08e7eb982" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1659.655369] env[62508]: DEBUG nova.compute.manager [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Instance network_info: |[{"id": "12fd1aa1-4663-49bc-8123-396a0fedaeb8", "address": "fa:16:3e:b3:d5:8c", "network": {"id": "29c40f34-e678-48f1-94f4-d128bc6dfe71", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1416755499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce3b480c3c81499599aef114f92775cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12fd1aa1-46", "ovs_interfaceid": "12fd1aa1-4663-49bc-8123-396a0fedaeb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1659.655639] env[62508]: DEBUG oslo_concurrency.lockutils [req-4c04e540-5a9a-4c81-81cf-775d4d165373 req-f2f37a90-5037-4a58-9e3e-83989be7cb77 service nova] Acquired lock "refresh_cache-e875f30e-2c25-46a4-8c74-36f08e7eb982" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1659.656038] env[62508]: DEBUG nova.network.neutron [req-4c04e540-5a9a-4c81-81cf-775d4d165373 req-f2f37a90-5037-4a58-9e3e-83989be7cb77 service nova] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Refreshing network info cache for port 12fd1aa1-4663-49bc-8123-396a0fedaeb8 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1659.657361] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:d5:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '12fd1aa1-4663-49bc-8123-396a0fedaeb8', 'vif_model': 'vmxnet3'}] 
{{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1659.665232] env[62508]: DEBUG oslo.service.loopingcall [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1659.669674] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1659.670395] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1659.670395] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c0f762-e456-8222-3503-989cd6a16e7e" [ 1659.670395] env[62508]: _type = "Task" [ 1659.670395] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.670649] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f50702b-54b9-4b11-8743-3bb9c11edaa2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.696636] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c0f762-e456-8222-3503-989cd6a16e7e, 'name': SearchDatastore_Task, 'duration_secs': 0.012631} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.698095] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1659.698369] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 24091abb-f71f-4528-8fc5-b97725cf079e/24091abb-f71f-4528-8fc5-b97725cf079e.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1659.698614] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1659.698614] env[62508]: value = "task-1776258" [ 1659.698614] env[62508]: _type = "Task" [ 1659.698614] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.698802] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4d0cc65-d00c-4991-8793-311eed5e5f62 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.710452] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776258, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.711907] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1659.711907] env[62508]: value = "task-1776259" [ 1659.711907] env[62508]: _type = "Task" [ 1659.711907] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.721608] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776259, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.770044] env[62508]: INFO nova.compute.manager [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Rebuilding instance [ 1659.830270] env[62508]: DEBUG nova.compute.manager [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1659.831264] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f707ee0-7476-4a47-8fd1-1417346c57c1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.921822] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776257, 'name': Rename_Task, 'duration_secs': 0.179936} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.922456] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1659.922456] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dce72b97-26c2-444a-b5b8-f50cab6da9ae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.932032] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1659.932032] env[62508]: value = "task-1776260" [ 1659.932032] env[62508]: _type = "Task" [ 1659.932032] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.945131] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ece412cc-3040-41d7-89a7-8696b92284ff tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.556s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.952039] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776260, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.108947] env[62508]: DEBUG nova.network.neutron [req-4c04e540-5a9a-4c81-81cf-775d4d165373 req-f2f37a90-5037-4a58-9e3e-83989be7cb77 service nova] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Updated VIF entry in instance network info cache for port 12fd1aa1-4663-49bc-8123-396a0fedaeb8. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1660.108947] env[62508]: DEBUG nova.network.neutron [req-4c04e540-5a9a-4c81-81cf-775d4d165373 req-f2f37a90-5037-4a58-9e3e-83989be7cb77 service nova] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Updating instance_info_cache with network_info: [{"id": "12fd1aa1-4663-49bc-8123-396a0fedaeb8", "address": "fa:16:3e:b3:d5:8c", "network": {"id": "29c40f34-e678-48f1-94f4-d128bc6dfe71", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1416755499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce3b480c3c81499599aef114f92775cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12fd1aa1-46", "ovs_interfaceid": "12fd1aa1-4663-49bc-8123-396a0fedaeb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1660.191802] env[62508]: DEBUG oslo_vmware.rw_handles [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52764eff-a3bf-f9e9-83b0-0ee0fc6acc52/disk-0.vmdk. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1660.193238] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3341b180-65b8-4a24-b562-aaf2bd7c6d6e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.201637] env[62508]: DEBUG oslo_vmware.rw_handles [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52764eff-a3bf-f9e9-83b0-0ee0fc6acc52/disk-0.vmdk is in state: ready. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1660.201637] env[62508]: ERROR oslo_vmware.rw_handles [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52764eff-a3bf-f9e9-83b0-0ee0fc6acc52/disk-0.vmdk due to incomplete transfer. [ 1660.205230] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d366c6f3-2097-4051-800f-36cbf91e7c1c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.213364] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776258, 'name': CreateVM_Task, 'duration_secs': 0.482952} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.218642] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1660.218852] env[62508]: DEBUG oslo_vmware.rw_handles [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52764eff-a3bf-f9e9-83b0-0ee0fc6acc52/disk-0.vmdk. {{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1660.219114] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Uploaded image 3590bb13-f252-467d-906d-58e26d1c5029 to the Glance image server {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1660.221760] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Destroying the VM {{(pid=62508) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1660.222856] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1660.223240] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1660.223707] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1660.224018] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-97baf53d-ce50-4a79-98d0-f7d4299eea16 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.226550] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c67d5bdb-64ee-40da-9571-1f34a6ce74ae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.233053] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776259, 'name': CopyVirtualDisk_Task, 
'duration_secs': 0.506903} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.234043] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 24091abb-f71f-4528-8fc5-b97725cf079e/24091abb-f71f-4528-8fc5-b97725cf079e.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1660.234251] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1660.234448] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-17e0e5c1-55ed-4bf7-af49-5584f879a1d0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.242192] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1660.242192] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525c82d0-5a59-3956-2427-11b2fcb0e815" [ 1660.242192] env[62508]: _type = "Task" [ 1660.242192] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.242565] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1660.242565] env[62508]: value = "task-1776262" [ 1660.242565] env[62508]: _type = "Task" [ 1660.242565] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.253227] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1660.253227] env[62508]: value = "task-1776263" [ 1660.253227] env[62508]: _type = "Task" [ 1660.253227] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.264959] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776262, 'name': Destroy_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.265276] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525c82d0-5a59-3956-2427-11b2fcb0e815, 'name': SearchDatastore_Task, 'duration_secs': 0.013544} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.265989] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1660.266335] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1660.266542] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1660.266718] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1660.266916] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1660.267215] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7f2026a-6539-45a4-b1ea-e5ebb22a5d85 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.272105] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776263, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.280917] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1660.281148] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1660.281915] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7664384-a598-460c-9d0c-e5a94c191158 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.288834] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1660.288834] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52991384-878d-f6b0-77dd-db2abfd48f99" [ 1660.288834] env[62508]: _type = "Task" [ 1660.288834] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.299936] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52991384-878d-f6b0-77dd-db2abfd48f99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.348646] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1660.348976] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b3f8ba5-9926-41c3-9ab2-ba53a44678af {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.357783] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1660.357783] env[62508]: value = "task-1776264" [ 1660.357783] env[62508]: _type = "Task" [ 1660.357783] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.367802] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776264, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.443056] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776260, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.515594] env[62508]: INFO nova.scheduler.client.report [None req-ece412cc-3040-41d7-89a7-8696b92284ff tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleted allocation for migration 8172f41b-4bbf-440a-a697-df783272e5c0 [ 1660.613109] env[62508]: DEBUG oslo_concurrency.lockutils [req-4c04e540-5a9a-4c81-81cf-775d4d165373 req-f2f37a90-5037-4a58-9e3e-83989be7cb77 service nova] Releasing lock "refresh_cache-e875f30e-2c25-46a4-8c74-36f08e7eb982" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1660.753814] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776262, 'name': Destroy_Task} progress is 33%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.762984] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776263, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114319} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.763345] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1660.764114] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-470fd97d-ed23-405c-9a92-644660d1099e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.787306] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 24091abb-f71f-4528-8fc5-b97725cf079e/24091abb-f71f-4528-8fc5-b97725cf079e.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1660.787660] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd225bc5-cbf5-4b58-a7b5-ad4064080db9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.814752] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52991384-878d-f6b0-77dd-db2abfd48f99, 'name': SearchDatastore_Task, 'duration_secs': 0.013068} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.816768] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1660.816768] env[62508]: value = "task-1776265" [ 1660.816768] env[62508]: _type = "Task" [ 1660.816768] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.817057] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97f1606e-81d1-4f4b-9bcd-e5d031cab3c1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.827092] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1660.827092] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5282fc8d-d9dd-2605-ef77-d3e4b08b8b02" [ 1660.827092] env[62508]: _type = "Task" [ 1660.827092] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.830940] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776265, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.843021] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5282fc8d-d9dd-2605-ef77-d3e4b08b8b02, 'name': SearchDatastore_Task, 'duration_secs': 0.011129} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.843311] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1660.843571] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] e875f30e-2c25-46a4-8c74-36f08e7eb982/e875f30e-2c25-46a4-8c74-36f08e7eb982.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1660.843834] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9b4fc088-2df1-4bcd-9faa-d5734052167f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.852215] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1660.852215] env[62508]: value = "task-1776266" [ 1660.852215] env[62508]: _type = "Task" [ 1660.852215] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.864951] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776266, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.871877] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776264, 'name': PowerOffVM_Task, 'duration_secs': 0.254697} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.872794] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1660.872794] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1660.873704] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b1af91-b64f-442b-acb9-d83aca17accd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.881861] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1660.882159] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc0545dd-446c-4dae-a787-e35bb4c854db {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.950424] env[62508]: DEBUG oslo_vmware.api [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776260, 'name': PowerOnVM_Task, 'duration_secs': 0.786733} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.951156] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1660.951462] env[62508]: INFO nova.compute.manager [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Took 8.41 seconds to spawn the instance on the hypervisor. 
[ 1660.951750] env[62508]: DEBUG nova.compute.manager [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1660.953034] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d06338-3e0e-435e-8d44-cf653f24fc6a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.009857] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1661.010212] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1661.010443] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Deleting the datastore file [datastore1] 4d24bacc-48c4-4649-bb29-fcae2cf77782 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1661.010752] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6297136c-dbd0-4204-9866-ddeaea6f6bf0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.020029] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1661.020029] env[62508]: value = "task-1776268" [ 1661.020029] env[62508]: _type = "Task" [ 1661.020029] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.024667] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ece412cc-3040-41d7-89a7-8696b92284ff tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "2e32ca83-8506-4588-bd33-4eadb7d2d30a" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 11.004s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.032487] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776268, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.261783] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776262, 'name': Destroy_Task, 'duration_secs': 0.717334} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.262537] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Destroyed the VM [ 1661.262537] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Deleting Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1661.263204] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b37b052c-2c1e-48c1-b577-98df1d76e298 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.271595] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1661.271595] env[62508]: value = "task-1776269" [ 1661.271595] env[62508]: _type = "Task" [ 1661.271595] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.282342] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776269, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.330819] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776265, 'name': ReconfigVM_Task, 'duration_secs': 0.370689} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.331136] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 24091abb-f71f-4528-8fc5-b97725cf079e/24091abb-f71f-4528-8fc5-b97725cf079e.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1661.331821] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f935a5a-8dd9-4b81-b4be-4c41f4cb1f31 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.339331] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1661.339331] env[62508]: value = "task-1776270" [ 1661.339331] env[62508]: _type = "Task" [ 1661.339331] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.356864] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776270, 'name': Rename_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.366522] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776266, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481532} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.366800] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] e875f30e-2c25-46a4-8c74-36f08e7eb982/e875f30e-2c25-46a4-8c74-36f08e7eb982.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1661.367044] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1661.367314] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d2b9c1a5-abd3-4b1b-8072-c7c495d74cd3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.378018] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1661.378018] env[62508]: value = "task-1776271" [ 1661.378018] env[62508]: _type = "Task" [ 1661.378018] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.387857] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776271, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.451666] env[62508]: DEBUG oslo_concurrency.lockutils [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "2e32ca83-8506-4588-bd33-4eadb7d2d30a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.452076] env[62508]: DEBUG oslo_concurrency.lockutils [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "2e32ca83-8506-4588-bd33-4eadb7d2d30a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.452245] env[62508]: DEBUG oslo_concurrency.lockutils [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "2e32ca83-8506-4588-bd33-4eadb7d2d30a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.452467] env[62508]: DEBUG oslo_concurrency.lockutils [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "2e32ca83-8506-4588-bd33-4eadb7d2d30a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.453861] env[62508]: DEBUG oslo_concurrency.lockutils [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "2e32ca83-8506-4588-bd33-4eadb7d2d30a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.455132] env[62508]: INFO nova.compute.manager [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Terminating instance [ 1661.458790] env[62508]: DEBUG nova.compute.manager [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1661.458950] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1661.459803] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a84267-35e1-4a70-bd82-e942ec423eb8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.473920] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1661.476045] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a09c3b54-8f6a-4255-bb0c-80d43edaca84 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.478937] env[62508]: INFO nova.compute.manager [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Took 17.48 seconds to build instance. [ 1661.483661] env[62508]: DEBUG oslo_vmware.api [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1661.483661] env[62508]: value = "task-1776272" [ 1661.483661] env[62508]: _type = "Task" [ 1661.483661] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.493582] env[62508]: DEBUG oslo_vmware.api [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776272, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.529713] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776268, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.420644} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.530015] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1661.530209] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1661.530386] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1661.564249] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Volume attach. Driver type: vmdk {{(pid=62508) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1661.564527] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368760', 'volume_id': '9420607e-8de8-4e29-a868-732bd5be0158', 'name': 'volume-9420607e-8de8-4e29-a868-732bd5be0158', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '145306d7-f0e8-46c0-b2ab-1c41c208f976', 'attached_at': '', 'detached_at': '', 'volume_id': '9420607e-8de8-4e29-a868-732bd5be0158', 'serial': '9420607e-8de8-4e29-a868-732bd5be0158'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1661.565411] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316f28bb-c5a8-47a3-9620-4fd6963dcddd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.584824] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c33174a-9dfe-4539-8ec2-0ffd7c857f67 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.620793] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] volume-9420607e-8de8-4e29-a868-732bd5be0158/volume-9420607e-8de8-4e29-a868-732bd5be0158.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1661.621134] env[62508]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19ac907d-95d9-4d44-80a6-21e6563d7007 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.642855] env[62508]: DEBUG oslo_vmware.api [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1661.642855] env[62508]: value = "task-1776273" [ 1661.642855] env[62508]: _type = "Task" [ 1661.642855] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.652158] env[62508]: DEBUG oslo_vmware.api [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776273, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.785318] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776269, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.849924] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776270, 'name': Rename_Task, 'duration_secs': 0.17205} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.850166] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1661.850428] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f1e0574-f1d6-4c91-8d23-954fa33e3905 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.862518] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1661.862518] env[62508]: value = "task-1776274" [ 1661.862518] env[62508]: _type = "Task" [ 1661.862518] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.875057] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776274, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.886212] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776271, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077072} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.886536] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1661.887366] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-683330ca-fadf-4051-9109-de4489b2b50b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.921958] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] e875f30e-2c25-46a4-8c74-36f08e7eb982/e875f30e-2c25-46a4-8c74-36f08e7eb982.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1661.922165] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29523f0a-85ef-4ee4-b747-b797add3c368 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.945436] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1661.945436] env[62508]: value = "task-1776275" [ 1661.945436] env[62508]: _type = "Task" [ 1661.945436] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.955997] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776275, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.980821] env[62508]: DEBUG oslo_concurrency.lockutils [None req-27b0d1f0-bfad-48d7-9b6f-e7b0920ed783 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.989s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.995163] env[62508]: DEBUG oslo_vmware.api [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776272, 'name': PowerOffVM_Task, 'duration_secs': 0.378238} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.996783] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1661.996983] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1662.000058] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a23a2e3a-6b53-4f56-834f-17cb610613bf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.002180] env[62508]: DEBUG oslo_concurrency.lockutils [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "c45b2c35-e58a-4ffa-861a-980747e552a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1662.002406] env[62508]: DEBUG oslo_concurrency.lockutils [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "c45b2c35-e58a-4ffa-861a-980747e552a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1662.153860] env[62508]: DEBUG oslo_vmware.api [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776273, 'name': ReconfigVM_Task, 'duration_secs': 0.398497} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.154159] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Reconfigured VM instance instance-00000044 to attach disk [datastore1] volume-9420607e-8de8-4e29-a868-732bd5be0158/volume-9420607e-8de8-4e29-a868-732bd5be0158.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1662.159183] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-441bcd37-f268-4b13-8b86-6577ca055d6f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.177933] env[62508]: DEBUG oslo_vmware.api [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1662.177933] env[62508]: value = "task-1776277" [ 1662.177933] env[62508]: _type = "Task" [ 1662.177933] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.186510] env[62508]: DEBUG oslo_vmware.api [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776277, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.213368] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1662.213643] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1662.213905] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleting the datastore file [datastore1] 2e32ca83-8506-4588-bd33-4eadb7d2d30a {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1662.214291] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-82be7724-d06e-4c84-a2fc-02efc131ca4b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.223030] env[62508]: DEBUG oslo_vmware.api [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1662.223030] env[62508]: value = "task-1776278" [ 1662.223030] env[62508]: _type = "Task" [ 1662.223030] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.236322] env[62508]: DEBUG oslo_vmware.api [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776278, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.285255] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Volume attach. Driver type: vmdk {{(pid=62508) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1662.285630] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368765', 'volume_id': '7a067ab9-712c-402e-b7e2-94111951d17b', 'name': 'volume-7a067ab9-712c-402e-b7e2-94111951d17b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'deee2c81-4d2c-47d3-aae6-ef829d59c644', 'attached_at': '', 'detached_at': '', 'volume_id': '7a067ab9-712c-402e-b7e2-94111951d17b', 'serial': '7a067ab9-712c-402e-b7e2-94111951d17b'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1662.286891] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff09fae5-82bf-47e7-8904-c51a36f7682b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.299269] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776269, 'name': RemoveSnapshot_Task} progress is 65%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.321698] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b45dab1-15b0-45fb-88f7-36fe0380720e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.360588] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] volume-7a067ab9-712c-402e-b7e2-94111951d17b/volume-7a067ab9-712c-402e-b7e2-94111951d17b.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1662.360955] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41a5a81c-923b-4e5f-8cc5-b995338801b7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.386573] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776274, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.386573] env[62508]: DEBUG oslo_vmware.api [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1662.386573] env[62508]: value = "task-1776279" [ 1662.386573] env[62508]: _type = "Task" [ 1662.386573] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.394559] env[62508]: DEBUG oslo_vmware.api [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776279, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.457495] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776275, 'name': ReconfigVM_Task, 'duration_secs': 0.416452} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.457827] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Reconfigured VM instance instance-00000051 to attach disk [datastore1] e875f30e-2c25-46a4-8c74-36f08e7eb982/e875f30e-2c25-46a4-8c74-36f08e7eb982.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1662.458617] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4742cf3c-0356-480a-b711-404a594e496b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.471549] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1662.471549] env[62508]: value = "task-1776280" [ 1662.471549] env[62508]: _type = "Task" [ 1662.471549] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.482796] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776280, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.505743] env[62508]: DEBUG nova.compute.manager [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1662.582599] env[62508]: DEBUG nova.virt.hardware [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1662.582854] env[62508]: DEBUG nova.virt.hardware [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1662.583019] env[62508]: DEBUG nova.virt.hardware [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1662.583211] env[62508]: DEBUG nova.virt.hardware [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1662.583446] env[62508]: DEBUG nova.virt.hardware [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1662.583735] env[62508]: DEBUG nova.virt.hardware [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1662.583777] env[62508]: DEBUG nova.virt.hardware [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1662.584551] env[62508]: DEBUG nova.virt.hardware [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1662.584551] env[62508]: DEBUG nova.virt.hardware [None 
req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1662.584551] env[62508]: DEBUG nova.virt.hardware [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1662.584551] env[62508]: DEBUG nova.virt.hardware [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1662.585934] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee3bd90-2cf5-4d50-b431-fc68bf29ff11 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.597129] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff02966-4aa5-47b8-a574-be127204e1cf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.615637] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:87:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8140829-5eac-40d8-a10c-eb881f57affc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c969ec53-5930-48a3-bad6-aaa89e2519c6', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1662.630245] env[62508]: DEBUG oslo.service.loopingcall [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1662.630245] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1662.630245] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4a1644f-adf5-4b02-9352-64061cca85a2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.657345] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1662.657345] env[62508]: value = "task-1776281" [ 1662.657345] env[62508]: _type = "Task" [ 1662.657345] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.666844] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776281, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.687255] env[62508]: DEBUG oslo_vmware.api [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776277, 'name': ReconfigVM_Task, 'duration_secs': 0.199963} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.687619] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368760', 'volume_id': '9420607e-8de8-4e29-a868-732bd5be0158', 'name': 'volume-9420607e-8de8-4e29-a868-732bd5be0158', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '145306d7-f0e8-46c0-b2ab-1c41c208f976', 'attached_at': '', 'detached_at': '', 'volume_id': '9420607e-8de8-4e29-a868-732bd5be0158', 'serial': '9420607e-8de8-4e29-a868-732bd5be0158'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1662.734667] env[62508]: DEBUG oslo_vmware.api [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776278, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.308149} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.735268] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1662.735365] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1662.735542] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1662.735717] env[62508]: INFO nova.compute.manager [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Took 1.28 seconds to destroy the instance on the hypervisor. [ 1662.736679] env[62508]: DEBUG oslo.service.loopingcall [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1662.736979] env[62508]: DEBUG nova.compute.manager [-] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1662.737059] env[62508]: DEBUG nova.network.neutron [-] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1662.783742] env[62508]: DEBUG oslo_vmware.api [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776269, 'name': RemoveSnapshot_Task, 'duration_secs': 1.223054} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.784026] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Deleted Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1662.784365] env[62508]: INFO nova.compute.manager [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Took 20.95 seconds to snapshot the instance on the hypervisor. [ 1662.835190] env[62508]: DEBUG nova.compute.manager [req-79e57cbe-0e7c-4909-bf30-453af99a9848 req-94cb3630-bced-43ee-b618-9404c1f9828a service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Received event network-changed-be5b5d9b-1f22-455a-b4f6-128f17030129 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1662.835433] env[62508]: DEBUG nova.compute.manager [req-79e57cbe-0e7c-4909-bf30-453af99a9848 req-94cb3630-bced-43ee-b618-9404c1f9828a service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Refreshing instance network info cache due to event network-changed-be5b5d9b-1f22-455a-b4f6-128f17030129. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1662.835758] env[62508]: DEBUG oslo_concurrency.lockutils [req-79e57cbe-0e7c-4909-bf30-453af99a9848 req-94cb3630-bced-43ee-b618-9404c1f9828a service nova] Acquiring lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1662.835819] env[62508]: DEBUG oslo_concurrency.lockutils [req-79e57cbe-0e7c-4909-bf30-453af99a9848 req-94cb3630-bced-43ee-b618-9404c1f9828a service nova] Acquired lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1662.836618] env[62508]: DEBUG nova.network.neutron [req-79e57cbe-0e7c-4909-bf30-453af99a9848 req-94cb3630-bced-43ee-b618-9404c1f9828a service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Refreshing network info cache for port be5b5d9b-1f22-455a-b4f6-128f17030129 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1662.888017] env[62508]: DEBUG oslo_vmware.api [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776274, 'name': PowerOnVM_Task, 'duration_secs': 0.633928} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.892357] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1662.892565] env[62508]: INFO nova.compute.manager [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Took 7.82 seconds to spawn the instance on the hypervisor. [ 1662.892758] env[62508]: DEBUG nova.compute.manager [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1662.893537] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c040a3-be21-4f91-a561-ea4dc305776b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.903256] env[62508]: DEBUG oslo_vmware.api [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776279, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.982102] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776280, 'name': Rename_Task, 'duration_secs': 0.412952} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.982553] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1662.982645] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f495174-cc0c-47fe-91f7-f617841267b3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.991130] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1662.991130] env[62508]: value = "task-1776282" [ 1662.991130] env[62508]: _type = "Task" [ 1662.991130] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.001216] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776282, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.034202] env[62508]: DEBUG oslo_concurrency.lockutils [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1663.034516] env[62508]: DEBUG oslo_concurrency.lockutils [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.035992] env[62508]: INFO nova.compute.claims [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1663.047678] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquiring lock "e07ab22e-bd07-4232-abfe-c0617c0b9813" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1663.047974] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lock "e07ab22e-bd07-4232-abfe-c0617c0b9813" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.048206] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquiring lock "e07ab22e-bd07-4232-abfe-c0617c0b9813-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1663.048399] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lock "e07ab22e-bd07-4232-abfe-c0617c0b9813-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.048604] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lock "e07ab22e-bd07-4232-abfe-c0617c0b9813-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1663.050736] env[62508]: INFO nova.compute.manager [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Terminating instance [ 1663.052560] env[62508]: DEBUG nova.compute.manager [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1663.052766] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1663.053623] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07296941-bff1-48c2-9379-ae95ac109091 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.063236] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1663.063537] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f91d7b5d-15f5-49f6-8159-02e49055a862 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.073960] env[62508]: DEBUG oslo_vmware.api [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1663.073960] env[62508]: value = "task-1776283" [ 1663.073960] env[62508]: _type = "Task" [ 1663.073960] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.089033] env[62508]: DEBUG oslo_vmware.api [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1776283, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.170633] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776281, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.349301] env[62508]: DEBUG nova.compute.manager [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Found 3 images (rotation: 2) {{(pid=62508) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1663.349301] env[62508]: DEBUG nova.compute.manager [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Rotating out 1 backups {{(pid=62508) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4563}} [ 1663.349477] env[62508]: DEBUG nova.compute.manager [None req-db7f6735-33f5-4f8f-9a78-80e0d5c518af tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Deleting image 57b290e8-da3a-4e9f-9233-d8f772b973bf {{(pid=62508) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4568}} [ 1663.381109] env[62508]: DEBUG nova.compute.manager [req-96ab8343-089f-4a46-a609-eb40da97859c req-3f7cceee-adea-46dd-a4af-2eb3ae2139e7 service nova] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Received event network-vif-deleted-8834c92d-7c01-4079-9e5c-7fbe2b25d73e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1663.381109] env[62508]: INFO nova.compute.manager [req-96ab8343-089f-4a46-a609-eb40da97859c req-3f7cceee-adea-46dd-a4af-2eb3ae2139e7 service nova] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Neutron deleted interface 8834c92d-7c01-4079-9e5c-7fbe2b25d73e; detaching it from the instance and deleting it from the info cache [ 1663.381109] env[62508]: DEBUG nova.network.neutron [req-96ab8343-089f-4a46-a609-eb40da97859c req-3f7cceee-adea-46dd-a4af-2eb3ae2139e7 service nova] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1663.399956] env[62508]: DEBUG oslo_vmware.api [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776279, 'name': ReconfigVM_Task, 'duration_secs': 0.849762} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.402891] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Reconfigured VM instance instance-0000003e to attach disk [datastore1] volume-7a067ab9-712c-402e-b7e2-94111951d17b/volume-7a067ab9-712c-402e-b7e2-94111951d17b.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1663.410266] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8c54b09-6087-4451-b3f8-744d99d89f2a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.434788] env[62508]: INFO nova.compute.manager [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Took 18.82 seconds to build instance. [ 1663.441829] env[62508]: DEBUG oslo_vmware.api [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1663.441829] env[62508]: value = "task-1776284" [ 1663.441829] env[62508]: _type = "Task" [ 1663.441829] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.459342] env[62508]: DEBUG oslo_vmware.api [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776284, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.504038] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776282, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.585541] env[62508]: DEBUG oslo_vmware.api [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1776283, 'name': PowerOffVM_Task, 'duration_secs': 0.279838} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.585856] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1663.585978] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1663.586270] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4659066b-8389-4be4-8a3e-0f4b935f0342 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.670581] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776281, 'name': CreateVM_Task, 'duration_secs': 0.673054} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.670786] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1663.671691] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1663.671862] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1663.672212] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1663.672693] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c5f30d2-a0f1-4267-a591-6374ff04523a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.681847] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1663.681847] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523ba776-8097-83f1-f429-a6e61bf3c942" [ 1663.681847] env[62508]: _type = "Task" [ 1663.681847] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.693430] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523ba776-8097-83f1-f429-a6e61bf3c942, 'name': SearchDatastore_Task, 'duration_secs': 0.011052} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.693430] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1663.693430] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1663.693430] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1663.693628] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1663.694022] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1663.694022] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-351055f7-068f-4712-96a4-329445de2049 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.703961] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1663.704212] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1663.705611] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2a66b4f-cbbf-4429-82f2-87ab371827e2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.714419] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1663.714419] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521eba90-dd50-bc3f-e16b-1d0f02cc9bd3" [ 1663.714419] env[62508]: _type = "Task" [ 1663.714419] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.725224] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521eba90-dd50-bc3f-e16b-1d0f02cc9bd3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.761605] env[62508]: DEBUG nova.objects.instance [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lazy-loading 'flavor' on Instance uuid 145306d7-f0e8-46c0-b2ab-1c41c208f976 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1663.775237] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1663.775558] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1663.775661] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Deleting the datastore file [datastore1] e07ab22e-bd07-4232-abfe-c0617c0b9813 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1663.775932] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b32122a-e91b-42d4-bac1-303c95aac9dd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.785359] env[62508]: DEBUG oslo_vmware.api [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for the task: (returnval){ [ 1663.785359] env[62508]: value = "task-1776286" [ 1663.785359] env[62508]: _type = "Task" [ 1663.785359] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.795140] env[62508]: DEBUG oslo_vmware.api [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1776286, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.807477] env[62508]: DEBUG nova.network.neutron [req-79e57cbe-0e7c-4909-bf30-453af99a9848 req-94cb3630-bced-43ee-b618-9404c1f9828a service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Updated VIF entry in instance network info cache for port be5b5d9b-1f22-455a-b4f6-128f17030129. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1663.807910] env[62508]: DEBUG nova.network.neutron [req-79e57cbe-0e7c-4909-bf30-453af99a9848 req-94cb3630-bced-43ee-b618-9404c1f9828a service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Updating instance_info_cache with network_info: [{"id": "be5b5d9b-1f22-455a-b4f6-128f17030129", "address": "fa:16:3e:8e:37:9c", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe5b5d9b-1f", "ovs_interfaceid": "be5b5d9b-1f22-455a-b4f6-128f17030129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1663.839706] env[62508]: DEBUG nova.network.neutron [-] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1663.883755] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-28f5296a-b728-4cbb-943c-5980bb824a59 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.895212] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6edd72bf-48fe-480e-9f34-609a95384c49 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.937713] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0fbd4af0-ab32-461d-8993-337ef6366b32 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "24091abb-f71f-4528-8fc5-b97725cf079e" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.331s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1663.938863] env[62508]: DEBUG nova.compute.manager [req-96ab8343-089f-4a46-a609-eb40da97859c req-3f7cceee-adea-46dd-a4af-2eb3ae2139e7 service nova] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Detach interface failed, port_id=8834c92d-7c01-4079-9e5c-7fbe2b25d73e, reason: Instance 2e32ca83-8506-4588-bd33-4eadb7d2d30a could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1663.953970] env[62508]: DEBUG oslo_vmware.api [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776284, 'name': ReconfigVM_Task, 'duration_secs': 0.22197} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.954551] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368765', 'volume_id': '7a067ab9-712c-402e-b7e2-94111951d17b', 'name': 'volume-7a067ab9-712c-402e-b7e2-94111951d17b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'deee2c81-4d2c-47d3-aae6-ef829d59c644', 'attached_at': '', 'detached_at': '', 'volume_id': '7a067ab9-712c-402e-b7e2-94111951d17b', 'serial': '7a067ab9-712c-402e-b7e2-94111951d17b'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1664.003353] env[62508]: DEBUG oslo_vmware.api [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776282, 'name': PowerOnVM_Task, 'duration_secs': 0.669517} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.003674] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1664.003920] env[62508]: INFO nova.compute.manager [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Took 6.48 seconds to spawn the instance on the hypervisor. 
[ 1664.004231] env[62508]: DEBUG nova.compute.manager [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1664.005294] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cdaaf83-3b29-4941-b671-3cd0af7227c1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.234096] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521eba90-dd50-bc3f-e16b-1d0f02cc9bd3, 'name': SearchDatastore_Task, 'duration_secs': 0.017872} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.239641] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29abe742-d1c1-4eea-9029-0eca1a78c399 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.249202] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1664.249202] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523e1e81-6d54-3b06-934e-4c585848dc67" [ 1664.249202] env[62508]: _type = "Task" [ 1664.249202] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.272504] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523e1e81-6d54-3b06-934e-4c585848dc67, 'name': SearchDatastore_Task, 'duration_secs': 0.013644} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.273568] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1664.273568] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 4d24bacc-48c4-4649-bb29-fcae2cf77782/4d24bacc-48c4-4649-bb29-fcae2cf77782.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1664.273877] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3fb35b95-2de3-47cf-bcce-52cc68e4fb25 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "145306d7-f0e8-46c0-b2ab-1c41c208f976" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.327s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1664.274874] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c53ef89-63f4-42b9-baac-bc5bf82feda2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.286297] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1664.286297] env[62508]: value = "task-1776287" [ 1664.286297] env[62508]: _type = "Task" [ 1664.286297] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.310600] env[62508]: DEBUG oslo_vmware.api [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Task: {'id': task-1776286, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207117} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.311485] env[62508]: DEBUG oslo_concurrency.lockutils [req-79e57cbe-0e7c-4909-bf30-453af99a9848 req-94cb3630-bced-43ee-b618-9404c1f9828a service nova] Releasing lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1664.313527] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776287, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.317338] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1664.317338] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1664.317338] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1664.317338] env[62508]: INFO nova.compute.manager [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1664.317338] env[62508]: DEBUG oslo.service.loopingcall [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1664.317338] env[62508]: DEBUG nova.compute.manager [-] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1664.317338] env[62508]: DEBUG nova.network.neutron [-] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1664.342774] env[62508]: INFO nova.compute.manager [-] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Took 1.61 seconds to deallocate network for instance. 
[ 1664.499566] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f092718-d3bc-453b-b8ae-14684d42067b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.511169] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a5b735-8088-4382-87c6-fd9c7e8df606 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.552045] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4041cdce-e851-415d-8fec-e0c6276111bd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.554979] env[62508]: INFO nova.compute.manager [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Took 19.88 seconds to build instance. [ 1664.563785] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98dc7d62-0645-417a-bb53-f926686ff230 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.581656] env[62508]: DEBUG nova.compute.provider_tree [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1664.803936] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776287, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.853220] env[62508]: DEBUG oslo_concurrency.lockutils [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1665.018592] env[62508]: DEBUG nova.objects.instance [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lazy-loading 'flavor' on Instance uuid deee2c81-4d2c-47d3-aae6-ef829d59c644 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1665.058394] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bdbc1a0b-1667-4bb6-8c6a-3ad051ab3172 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "e875f30e-2c25-46a4-8c74-36f08e7eb982" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.398s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1665.093994] env[62508]: INFO nova.compute.manager [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Rebuilding instance [ 1665.117614] env[62508]: ERROR nova.scheduler.client.report [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [req-7b6f6868-14ea-4844-901f-9f4c9e6324ed] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7b6f6868-14ea-4844-901f-9f4c9e6324ed"}]} [ 1665.142463] env[62508]: DEBUG nova.scheduler.client.report [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1665.159469] env[62508]: DEBUG nova.compute.manager [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1665.159638] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cc6f08d-2cef-4330-99ae-562d4b241ce5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.163467] env[62508]: DEBUG nova.scheduler.client.report [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1665.163685] env[62508]: DEBUG nova.compute.provider_tree [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1665.183220] env[62508]: DEBUG nova.scheduler.client.report [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1665.206936] env[62508]: DEBUG nova.scheduler.client.report [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: 
COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1665.307938] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776287, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.826602} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.308238] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 4d24bacc-48c4-4649-bb29-fcae2cf77782/4d24bacc-48c4-4649-bb29-fcae2cf77782.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1665.308459] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1665.308759] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d86ad21f-5a01-476c-b3c0-40d4c2bd0cfe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.317700] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1665.317700] env[62508]: value = "task-1776288" [ 1665.317700] env[62508]: _type = "Task" [ 1665.317700] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.335486] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776288, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.524971] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0a4a1b62-5d05-48b8-a440-594095c85c29 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "deee2c81-4d2c-47d3-aae6-ef829d59c644" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.870s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1665.554227] env[62508]: DEBUG oslo_concurrency.lockutils [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "5b3dd9d0-7f30-45c2-931a-ce7175820710" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1665.554797] env[62508]: DEBUG oslo_concurrency.lockutils [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "5b3dd9d0-7f30-45c2-931a-ce7175820710" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1665.589497] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc9f761-5bc7-4c79-9015-53610d3367d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.599207] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4586458-be8d-4832-a2f5-d9b9d50079cb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.605812] env[62508]: DEBUG nova.compute.manager [req-8968b51d-0604-4038-8c36-a1d9aea6b385 req-1e48a2e5-c2ad-409b-8248-baf032d40cb9 service nova] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Received event network-vif-deleted-971a1f15-0acf-4dbe-a120-b172a0eb9552 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1665.605945] env[62508]: INFO nova.compute.manager [req-8968b51d-0604-4038-8c36-a1d9aea6b385 req-1e48a2e5-c2ad-409b-8248-baf032d40cb9 service nova] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Neutron deleted interface 971a1f15-0acf-4dbe-a120-b172a0eb9552; detaching it from the instance and deleting it from the info cache [ 1665.606239] env[62508]: DEBUG nova.network.neutron [req-8968b51d-0604-4038-8c36-a1d9aea6b385 req-1e48a2e5-c2ad-409b-8248-baf032d40cb9 service nova] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1665.637598] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13f561ef-4752-4998-bc95-3433233d82f8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.640214] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-de2b8154-dbc5-4571-93a2-d5f69d727a44 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.650591] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32766b8d-2e8e-4313-b744-3bc7c4734c25 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.657778] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea8ded42-cfa6-4875-833e-b95fb95847b3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.682281] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1665.682561] env[62508]: DEBUG nova.compute.provider_tree [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1665.698053] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-50918e0a-ae6b-48ad-b94b-2ec3fa0d4c52 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.700774] env[62508]: DEBUG nova.compute.manager [req-8968b51d-0604-4038-8c36-a1d9aea6b385 req-1e48a2e5-c2ad-409b-8248-baf032d40cb9 service nova] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Detach interface failed, port_id=971a1f15-0acf-4dbe-a120-b172a0eb9552, reason: Instance e07ab22e-bd07-4232-abfe-c0617c0b9813 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1665.709993] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1665.709993] env[62508]: value = "task-1776289" [ 1665.709993] env[62508]: _type = "Task" [ 1665.709993] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.723433] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776289, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.726170] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4b9671b7-71db-4466-99a7-575fd5ba7024 tempest-ServersAdminTestJSON-1646050049 tempest-ServersAdminTestJSON-1646050049-project-admin] Acquiring lock "refresh_cache-e875f30e-2c25-46a4-8c74-36f08e7eb982" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1665.726632] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4b9671b7-71db-4466-99a7-575fd5ba7024 tempest-ServersAdminTestJSON-1646050049 tempest-ServersAdminTestJSON-1646050049-project-admin] Acquired lock "refresh_cache-e875f30e-2c25-46a4-8c74-36f08e7eb982" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1665.726717] env[62508]: DEBUG nova.network.neutron [None req-4b9671b7-71db-4466-99a7-575fd5ba7024 tempest-ServersAdminTestJSON-1646050049 tempest-ServersAdminTestJSON-1646050049-project-admin] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1665.740640] env[62508]: DEBUG nova.scheduler.client.report [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 121 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1665.740931] env[62508]: DEBUG nova.compute.provider_tree [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 121 to 122 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1665.741129] env[62508]: DEBUG nova.compute.provider_tree [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1665.829414] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776288, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089548} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.829782] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1665.831120] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951f9f40-e1b7-4249-a04a-ac9404e608d1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.855347] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 4d24bacc-48c4-4649-bb29-fcae2cf77782/4d24bacc-48c4-4649-bb29-fcae2cf77782.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1665.856127] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49e66aed-ac8e-4540-a426-39e879ab4adf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.877844] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1665.877844] env[62508]: value = "task-1776290" [ 1665.877844] env[62508]: _type = "Task" [ 1665.877844] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.882711] env[62508]: DEBUG nova.network.neutron [-] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1665.888432] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776290, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.993159] env[62508]: DEBUG oslo_concurrency.lockutils [None req-80e6f3fe-2217-495b-a974-f20902e9fbad tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquiring lock "deee2c81-4d2c-47d3-aae6-ef829d59c644" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1665.993159] env[62508]: DEBUG oslo_concurrency.lockutils [None req-80e6f3fe-2217-495b-a974-f20902e9fbad tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "deee2c81-4d2c-47d3-aae6-ef829d59c644" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.063318] env[62508]: DEBUG nova.compute.manager [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1666.223640] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776289, 'name': PowerOffVM_Task, 'duration_secs': 0.256783} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.223989] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1666.246728] env[62508]: DEBUG oslo_concurrency.lockutils [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.212s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.247912] env[62508]: DEBUG nova.compute.manager [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1666.253780] env[62508]: DEBUG oslo_concurrency.lockutils [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.398s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.253780] env[62508]: DEBUG oslo_concurrency.lockutils [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.287625] env[62508]: INFO nova.scheduler.client.report [None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleted allocations for instance 2e32ca83-8506-4588-bd33-4eadb7d2d30a [ 1666.302478] env[62508]: INFO nova.compute.manager [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Detaching volume 9420607e-8de8-4e29-a868-732bd5be0158 [ 1666.343911] env[62508]: INFO nova.virt.block_device [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Attempting to driver detach volume 9420607e-8de8-4e29-a868-732bd5be0158 from mountpoint /dev/sdb [ 1666.344334] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Volume detach. 
Driver type: vmdk {{(pid=62508) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1666.346017] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368760', 'volume_id': '9420607e-8de8-4e29-a868-732bd5be0158', 'name': 'volume-9420607e-8de8-4e29-a868-732bd5be0158', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '145306d7-f0e8-46c0-b2ab-1c41c208f976', 'attached_at': '', 'detached_at': '', 'volume_id': '9420607e-8de8-4e29-a868-732bd5be0158', 'serial': '9420607e-8de8-4e29-a868-732bd5be0158'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1666.346017] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc54674-04de-4dc7-93d7-91d3895df37a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.375917] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a578b0a-1c60-4d07-b064-39bc7a87b1c3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.391022] env[62508]: INFO nova.compute.manager [-] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Took 2.07 seconds to deallocate network for instance. [ 1666.391378] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776290, 'name': ReconfigVM_Task, 'duration_secs': 0.350177} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.392106] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c5fcbe-5681-41e1-a5ef-1f8613caaaaa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.396086] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 4d24bacc-48c4-4649-bb29-fcae2cf77782/4d24bacc-48c4-4649-bb29-fcae2cf77782.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1666.398848] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-12c80e05-34ab-474b-bd66-033d032a856c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.431463] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c38d16-c794-444d-8af4-f8abc7e86e13 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.434395] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1666.434395] env[62508]: value = "task-1776291" [ 1666.434395] env[62508]: _type = "Task" [ 1666.434395] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.455137] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] The volume has not been displaced from its original location: [datastore1] volume-9420607e-8de8-4e29-a868-732bd5be0158/volume-9420607e-8de8-4e29-a868-732bd5be0158.vmdk. No consolidation needed. {{(pid=62508) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1666.461735] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Reconfiguring VM instance instance-00000044 to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1666.462897] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-237d7e76-c630-42c4-9efb-b0f2ca321d81 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.482833] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776291, 'name': Rename_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.490521] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1666.490521] env[62508]: value = "task-1776292" [ 1666.490521] env[62508]: _type = "Task" [ 1666.490521] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.495887] env[62508]: INFO nova.compute.manager [None req-80e6f3fe-2217-495b-a974-f20902e9fbad tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Detaching volume 7a067ab9-712c-402e-b7e2-94111951d17b [ 1666.501601] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776292, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.536380] env[62508]: INFO nova.virt.block_device [None req-80e6f3fe-2217-495b-a974-f20902e9fbad tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Attempting to driver detach volume 7a067ab9-712c-402e-b7e2-94111951d17b from mountpoint /dev/sdb [ 1666.536675] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-80e6f3fe-2217-495b-a974-f20902e9fbad tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Volume detach. 
Driver type: vmdk {{(pid=62508) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1666.536882] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-80e6f3fe-2217-495b-a974-f20902e9fbad tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368765', 'volume_id': '7a067ab9-712c-402e-b7e2-94111951d17b', 'name': 'volume-7a067ab9-712c-402e-b7e2-94111951d17b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'deee2c81-4d2c-47d3-aae6-ef829d59c644', 'attached_at': '', 'detached_at': '', 'volume_id': '7a067ab9-712c-402e-b7e2-94111951d17b', 'serial': '7a067ab9-712c-402e-b7e2-94111951d17b'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1666.537826] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99e28be-01c8-4863-9a72-498466a2b2fe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.561500] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a239d348-f69e-4b9e-a6e4-05680fdadd91 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.573661] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc88e58-be8d-4476-af01-a3ff7e1c0d7d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.578984] env[62508]: DEBUG nova.network.neutron [None req-4b9671b7-71db-4466-99a7-575fd5ba7024 tempest-ServersAdminTestJSON-1646050049 tempest-ServersAdminTestJSON-1646050049-project-admin] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Updating instance_info_cache with network_info: [{"id": "12fd1aa1-4663-49bc-8123-396a0fedaeb8", "address": "fa:16:3e:b3:d5:8c", "network": {"id": "29c40f34-e678-48f1-94f4-d128bc6dfe71", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1416755499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce3b480c3c81499599aef114f92775cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12fd1aa1-46", "ovs_interfaceid": "12fd1aa1-4663-49bc-8123-396a0fedaeb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1666.603818] env[62508]: DEBUG oslo_concurrency.lockutils [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] 
Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1666.604097] env[62508]: DEBUG oslo_concurrency.lockutils [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.605719] env[62508]: INFO nova.compute.claims [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1666.608759] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e8eb3c-1f02-4c6f-ac6a-a3969e5e5e11 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.627540] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-80e6f3fe-2217-495b-a974-f20902e9fbad tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] The volume has not been displaced from its original location: [datastore1] volume-7a067ab9-712c-402e-b7e2-94111951d17b/volume-7a067ab9-712c-402e-b7e2-94111951d17b.vmdk. No consolidation needed. {{(pid=62508) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1666.632985] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-80e6f3fe-2217-495b-a974-f20902e9fbad tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Reconfiguring VM instance instance-0000003e to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1666.633380] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78402324-8c5c-4233-adcb-1e7c2ff53b6d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.656989] env[62508]: DEBUG oslo_vmware.api [None req-80e6f3fe-2217-495b-a974-f20902e9fbad tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1666.656989] env[62508]: value = "task-1776293" [ 1666.656989] env[62508]: _type = "Task" [ 1666.656989] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.666913] env[62508]: DEBUG oslo_vmware.api [None req-80e6f3fe-2217-495b-a974-f20902e9fbad tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776293, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.755020] env[62508]: DEBUG nova.compute.utils [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1666.756519] env[62508]: DEBUG nova.compute.manager [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1666.756783] env[62508]: DEBUG nova.network.neutron [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1666.769382] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46c36471-0685-42a9-839c-277f6d5e38e4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1666.769719] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46c36471-0685-42a9-839c-277f6d5e38e4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.769975] env[62508]: DEBUG nova.compute.manager [None req-46c36471-0685-42a9-839c-277f6d5e38e4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1666.770956] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb10457-4ea3-4f78-bfd7-085a1d6378be {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.781895] env[62508]: DEBUG nova.compute.manager [None req-46c36471-0685-42a9-839c-277f6d5e38e4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62508) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1666.782544] env[62508]: DEBUG nova.objects.instance [None req-46c36471-0685-42a9-839c-277f6d5e38e4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lazy-loading 'flavor' on Instance uuid e478855d-e9c7-4abc-8e22-a4b2eb0c7310 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1666.797888] env[62508]: DEBUG oslo_concurrency.lockutils 
[None req-61136993-01bc-4520-af5d-b6836c955956 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "2e32ca83-8506-4588-bd33-4eadb7d2d30a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.346s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.805787] env[62508]: DEBUG nova.policy [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f3c96cc4a58a4321837c1ab8badc686a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0156fba01363470eaa9771d5f296f730', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1666.905943] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1666.946103] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776291, 'name': Rename_Task, 'duration_secs': 0.176934} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.946388] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1666.946630] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a30a06b3-c368-4943-ab94-cb35f9606d78 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.955067] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1666.955067] env[62508]: value = "task-1776294" [ 1666.955067] env[62508]: _type = "Task" [ 1666.955067] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.964561] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776294, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.008999] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776292, 'name': ReconfigVM_Task, 'duration_secs': 0.257447} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.009497] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Reconfigured VM instance instance-00000044 to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1667.015653] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86b44c09-2957-41ad-af3e-e2480ed41d93 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.033860] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1667.033860] env[62508]: value = "task-1776295" [ 1667.033860] env[62508]: _type = "Task" [ 1667.033860] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.043385] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776295, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.081140] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4b9671b7-71db-4466-99a7-575fd5ba7024 tempest-ServersAdminTestJSON-1646050049 tempest-ServersAdminTestJSON-1646050049-project-admin] Releasing lock "refresh_cache-e875f30e-2c25-46a4-8c74-36f08e7eb982" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1667.081514] env[62508]: DEBUG nova.compute.manager [None req-4b9671b7-71db-4466-99a7-575fd5ba7024 tempest-ServersAdminTestJSON-1646050049 tempest-ServersAdminTestJSON-1646050049-project-admin] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Inject network info {{(pid=62508) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1667.081697] env[62508]: DEBUG nova.compute.manager [None req-4b9671b7-71db-4466-99a7-575fd5ba7024 tempest-ServersAdminTestJSON-1646050049 tempest-ServersAdminTestJSON-1646050049-project-admin] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] network_info to inject: |[{"id": "12fd1aa1-4663-49bc-8123-396a0fedaeb8", "address": "fa:16:3e:b3:d5:8c", "network": {"id": "29c40f34-e678-48f1-94f4-d128bc6dfe71", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1416755499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce3b480c3c81499599aef114f92775cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12fd1aa1-46", "ovs_interfaceid": "12fd1aa1-4663-49bc-8123-396a0fedaeb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1667.087121] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9671b7-71db-4466-99a7-575fd5ba7024 tempest-ServersAdminTestJSON-1646050049 tempest-ServersAdminTestJSON-1646050049-project-admin] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Reconfiguring VM instance to set the machine id {{(pid=62508) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1667.091031] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2e437cd-78b3-4492-b805-a5f6ffc938d8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.104146] env[62508]: DEBUG oslo_vmware.api [None req-4b9671b7-71db-4466-99a7-575fd5ba7024 tempest-ServersAdminTestJSON-1646050049 tempest-ServersAdminTestJSON-1646050049-project-admin] Waiting for the task: (returnval){ [ 1667.104146] env[62508]: value = "task-1776296" [ 1667.104146] env[62508]: _type = "Task" [ 1667.104146] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.118035] env[62508]: DEBUG oslo_vmware.api [None req-4b9671b7-71db-4466-99a7-575fd5ba7024 tempest-ServersAdminTestJSON-1646050049 tempest-ServersAdminTestJSON-1646050049-project-admin] Task: {'id': task-1776296, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.147733] env[62508]: DEBUG nova.network.neutron [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Successfully created port: 115a9f0a-d45f-4776-82fb-ff4d9b1c9fc9 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1667.171168] env[62508]: DEBUG oslo_vmware.api [None req-80e6f3fe-2217-495b-a974-f20902e9fbad tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776293, 'name': ReconfigVM_Task, 'duration_secs': 0.487277} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.171702] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-80e6f3fe-2217-495b-a974-f20902e9fbad tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Reconfigured VM instance instance-0000003e to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1667.180443] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d55c01af-c02c-44c9-a771-4fdc7b1b878c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.203038] env[62508]: DEBUG oslo_vmware.api [None req-80e6f3fe-2217-495b-a974-f20902e9fbad tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1667.203038] env[62508]: value = "task-1776297" [ 1667.203038] env[62508]: _type = "Task" [ 1667.203038] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.212719] env[62508]: DEBUG oslo_vmware.api [None req-80e6f3fe-2217-495b-a974-f20902e9fbad tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776297, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.262476] env[62508]: DEBUG nova.compute.manager [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1667.288690] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-46c36471-0685-42a9-839c-277f6d5e38e4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1667.290050] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36b16a36-4532-4e78-b47b-754d1095452a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.298697] env[62508]: DEBUG oslo_vmware.api [None req-46c36471-0685-42a9-839c-277f6d5e38e4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1667.298697] env[62508]: value = "task-1776298" [ 1667.298697] env[62508]: _type = "Task" [ 1667.298697] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.312478] env[62508]: DEBUG oslo_vmware.api [None req-46c36471-0685-42a9-839c-277f6d5e38e4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776298, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.459012] env[62508]: DEBUG nova.objects.instance [None req-40cc4109-57ae-48fa-8de9-54003edcb486 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Lazy-loading 'flavor' on Instance uuid 6afa4e73-64b4-4b10-b598-433f0c22ecb3 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1667.474825] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776294, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.549348] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776295, 'name': ReconfigVM_Task, 'duration_secs': 0.274382} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.549793] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368760', 'volume_id': '9420607e-8de8-4e29-a868-732bd5be0158', 'name': 'volume-9420607e-8de8-4e29-a868-732bd5be0158', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '145306d7-f0e8-46c0-b2ab-1c41c208f976', 'attached_at': '', 'detached_at': '', 'volume_id': '9420607e-8de8-4e29-a868-732bd5be0158', 'serial': '9420607e-8de8-4e29-a868-732bd5be0158'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1667.626228] env[62508]: DEBUG oslo_vmware.api [None req-4b9671b7-71db-4466-99a7-575fd5ba7024 tempest-ServersAdminTestJSON-1646050049 tempest-ServersAdminTestJSON-1646050049-project-admin] Task: {'id': task-1776296, 'name': ReconfigVM_Task, 'duration_secs': 0.177899} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.627129] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9671b7-71db-4466-99a7-575fd5ba7024 tempest-ServersAdminTestJSON-1646050049 tempest-ServersAdminTestJSON-1646050049-project-admin] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Reconfigured VM instance to set the machine id {{(pid=62508) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1667.640079] env[62508]: DEBUG nova.compute.manager [req-67d5b2d8-fa72-46af-8267-13b50f741ef8 req-5409fad9-e46d-4717-83f1-45eb19b8745f service nova] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Received event network-changed-0eda6157-2402-4297-8eb5-07a5b94eba56 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1667.640288] env[62508]: DEBUG nova.compute.manager [req-67d5b2d8-fa72-46af-8267-13b50f741ef8 req-5409fad9-e46d-4717-83f1-45eb19b8745f service nova] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Refreshing instance network info cache due to event network-changed-0eda6157-2402-4297-8eb5-07a5b94eba56. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1667.640508] env[62508]: DEBUG oslo_concurrency.lockutils [req-67d5b2d8-fa72-46af-8267-13b50f741ef8 req-5409fad9-e46d-4717-83f1-45eb19b8745f service nova] Acquiring lock "refresh_cache-24091abb-f71f-4528-8fc5-b97725cf079e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1667.640653] env[62508]: DEBUG oslo_concurrency.lockutils [req-67d5b2d8-fa72-46af-8267-13b50f741ef8 req-5409fad9-e46d-4717-83f1-45eb19b8745f service nova] Acquired lock "refresh_cache-24091abb-f71f-4528-8fc5-b97725cf079e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1667.640819] env[62508]: DEBUG nova.network.neutron [req-67d5b2d8-fa72-46af-8267-13b50f741ef8 req-5409fad9-e46d-4717-83f1-45eb19b8745f service nova] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Refreshing network info cache for port 0eda6157-2402-4297-8eb5-07a5b94eba56 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1667.713774] env[62508]: DEBUG oslo_vmware.api [None req-80e6f3fe-2217-495b-a974-f20902e9fbad tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776297, 'name': ReconfigVM_Task, 'duration_secs': 0.168387} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.716142] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-80e6f3fe-2217-495b-a974-f20902e9fbad tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368765', 'volume_id': '7a067ab9-712c-402e-b7e2-94111951d17b', 'name': 'volume-7a067ab9-712c-402e-b7e2-94111951d17b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'deee2c81-4d2c-47d3-aae6-ef829d59c644', 'attached_at': '', 'detached_at': '', 'volume_id': '7a067ab9-712c-402e-b7e2-94111951d17b', 'serial': '7a067ab9-712c-402e-b7e2-94111951d17b'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1667.813463] env[62508]: DEBUG oslo_vmware.api [None req-46c36471-0685-42a9-839c-277f6d5e38e4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776298, 'name': PowerOffVM_Task, 'duration_secs': 0.26237} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.813729] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-46c36471-0685-42a9-839c-277f6d5e38e4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1667.813905] env[62508]: DEBUG nova.compute.manager [None req-46c36471-0685-42a9-839c-277f6d5e38e4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1667.814957] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd11b697-dfff-452f-bc66-cebe41af8b52 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.907654] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-291cff50-268c-4cbb-aed9-de07b76c0dde {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.915967] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2a227f-f246-4ea7-a102-c30345941654 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.947939] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b23f00-b475-4bc6-9849-7d8be3726410 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.958607] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c89fa9-f41b-491f-9379-04d989530e3a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.969035] env[62508]: DEBUG oslo_concurrency.lockutils [None req-40cc4109-57ae-48fa-8de9-54003edcb486 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Acquiring lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1667.969174] env[62508]: DEBUG oslo_concurrency.lockutils [None req-40cc4109-57ae-48fa-8de9-54003edcb486 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Acquired lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1667.982194] env[62508]: DEBUG nova.compute.provider_tree [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1667.983320] env[62508]: DEBUG oslo_vmware.api [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': 
task-1776294, 'name': PowerOnVM_Task, 'duration_secs': 0.652421} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.983521] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1667.983721] env[62508]: DEBUG nova.compute.manager [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1667.984518] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ec41c9-afc4-4d59-b9ff-6ecd20f5a80a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.118182] env[62508]: INFO nova.compute.manager [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Rebuilding instance [ 1668.158622] env[62508]: DEBUG nova.compute.manager [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1668.161543] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffabd3ea-09d4-4c7c-a5db-55096df61165 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.260725] env[62508]: DEBUG nova.objects.instance [None req-80e6f3fe-2217-495b-a974-f20902e9fbad tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lazy-loading 'flavor' on Instance uuid deee2c81-4d2c-47d3-aae6-ef829d59c644 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1668.271592] env[62508]: DEBUG nova.compute.manager [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1668.300129] env[62508]: DEBUG nova.virt.hardware [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1668.300129] env[62508]: DEBUG nova.virt.hardware [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1668.300129] env[62508]: DEBUG nova.virt.hardware [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1668.300129] env[62508]: DEBUG nova.virt.hardware [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1668.300129] env[62508]: DEBUG nova.virt.hardware [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1668.300129] env[62508]: DEBUG nova.virt.hardware [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1668.300129] env[62508]: DEBUG nova.virt.hardware [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1668.300129] env[62508]: DEBUG nova.virt.hardware [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1668.300129] 
env[62508]: DEBUG nova.virt.hardware [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1668.300763] env[62508]: DEBUG nova.virt.hardware [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1668.301114] env[62508]: DEBUG nova.virt.hardware [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1668.302124] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5904ccb5-a405-4909-ba1c-ac594b68e3f2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.313986] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f11d45a-8f2e-4cda-9d2e-07c1757752c6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.333212] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46c36471-0685-42a9-839c-277f6d5e38e4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.563s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.355088] env[62508]: DEBUG nova.network.neutron [None req-40cc4109-57ae-48fa-8de9-54003edcb486 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1668.443367] env[62508]: DEBUG nova.network.neutron [req-67d5b2d8-fa72-46af-8267-13b50f741ef8 req-5409fad9-e46d-4717-83f1-45eb19b8745f service nova] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Updated VIF entry in instance network info cache for port 0eda6157-2402-4297-8eb5-07a5b94eba56. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1668.443860] env[62508]: DEBUG nova.network.neutron [req-67d5b2d8-fa72-46af-8267-13b50f741ef8 req-5409fad9-e46d-4717-83f1-45eb19b8745f service nova] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Updating instance_info_cache with network_info: [{"id": "0eda6157-2402-4297-8eb5-07a5b94eba56", "address": "fa:16:3e:b7:4c:0b", "network": {"id": "121c7907-9028-4be7-9d23-48e5c34ec429", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-954413717-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e682eb7bbd240afb2f6581c7478b99c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eda6157-24", "ovs_interfaceid": "0eda6157-2402-4297-8eb5-07a5b94eba56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1668.488562] env[62508]: DEBUG nova.scheduler.client.report [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1668.501213] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.630741] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1668.631083] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb872aac-6aa4-4f3f-8ebf-e900d2c1a4ac {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.656105] env[62508]: DEBUG oslo_vmware.api [None 
req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1668.656105] env[62508]: value = "task-1776299" [ 1668.656105] env[62508]: _type = "Task" [ 1668.656105] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.667411] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] VM already powered off {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1668.667678] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Volume detach. Driver type: vmdk {{(pid=62508) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1668.667919] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368760', 'volume_id': '9420607e-8de8-4e29-a868-732bd5be0158', 'name': 'volume-9420607e-8de8-4e29-a868-732bd5be0158', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '145306d7-f0e8-46c0-b2ab-1c41c208f976', 'attached_at': '', 'detached_at': '', 'volume_id': '9420607e-8de8-4e29-a868-732bd5be0158', 'serial': '9420607e-8de8-4e29-a868-732bd5be0158'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1668.668712] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d21538-7106-464f-8569-cbdb33c3b46f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.672420] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1668.672651] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-94c60853-84db-4ad0-bdd1-8134be59c700 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.696762] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6eed8dc-89ab-479a-8206-152c97bd228f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.700189] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1668.700189] env[62508]: value = "task-1776300" [ 1668.700189] env[62508]: _type = "Task" [ 1668.700189] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.706864] env[62508]: WARNING nova.virt.vmwareapi.driver [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1668.707208] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1668.711106] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ceac326-20f0-4394-8d86-40aac0af4683 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.718300] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776300, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.723830] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1668.723830] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a1236ce-a5b7-43c4-8413-351fd8c52230 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.857136] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1668.857220] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1668.857359] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Deleting the datastore file [datastore1] 145306d7-f0e8-46c0-b2ab-1c41c208f976 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1668.862174] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-690f1ab0-81cb-4260-8cb8-6ed4b190a20b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.862594] env[62508]: DEBUG nova.network.neutron [None req-02fd26da-4216-4531-88a0-10ffaab893e4 
tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Successfully updated port: 115a9f0a-d45f-4776-82fb-ff4d9b1c9fc9 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1668.869701] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1668.869701] env[62508]: value = "task-1776302" [ 1668.869701] env[62508]: _type = "Task" [ 1668.869701] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.882130] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776302, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.933257] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Acquiring lock "f307d4d5-e877-4d0a-951c-779c1d2e573b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.933693] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Lock "f307d4d5-e877-4d0a-951c-779c1d2e573b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.933865] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Acquiring lock "f307d4d5-e877-4d0a-951c-779c1d2e573b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.934154] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Lock "f307d4d5-e877-4d0a-951c-779c1d2e573b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.935328] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Lock "f307d4d5-e877-4d0a-951c-779c1d2e573b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.938246] env[62508]: INFO nova.compute.manager [None 
req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Terminating instance [ 1668.943574] env[62508]: DEBUG nova.compute.manager [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1668.944088] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1668.944309] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a0ce92fa-e098-4d0c-8a3a-a43696a07015 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.946590] env[62508]: DEBUG oslo_concurrency.lockutils [req-67d5b2d8-fa72-46af-8267-13b50f741ef8 req-5409fad9-e46d-4717-83f1-45eb19b8745f service nova] Releasing lock "refresh_cache-24091abb-f71f-4528-8fc5-b97725cf079e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1668.957751] env[62508]: DEBUG oslo_vmware.api [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Waiting for the task: (returnval){ [ 1668.957751] env[62508]: value = "task-1776303" [ 1668.957751] env[62508]: _type = "Task" [ 1668.957751] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.970309] env[62508]: DEBUG oslo_vmware.api [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Task: {'id': task-1776303, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.994216] env[62508]: DEBUG oslo_concurrency.lockutils [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.390s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.994770] env[62508]: DEBUG nova.compute.manager [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1668.997705] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.092s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.997932] env[62508]: DEBUG nova.objects.instance [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lazy-loading 'resources' on Instance uuid e07ab22e-bd07-4232-abfe-c0617c0b9813 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1669.214814] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776300, 'name': PowerOffVM_Task, 'duration_secs': 0.323025} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.214814] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1669.214814] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1669.214814] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef320170-975b-42cd-b33c-9dec1c1f8c31 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.227255] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1669.228552] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da8aa564-7593-4567-85c4-7aebdbc0de87 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.274583] env[62508]: DEBUG oslo_concurrency.lockutils [None req-80e6f3fe-2217-495b-a974-f20902e9fbad tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "deee2c81-4d2c-47d3-aae6-ef829d59c644" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.281s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.361879] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 
tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1669.362090] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1669.362377] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Deleting the datastore file [datastore1] bee2cc61-b26c-4d2d-a2aa-ec79b8678e32 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1669.366380] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1fe3faed-60da-4792-9c76-038986a8df97 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.368951] env[62508]: DEBUG oslo_concurrency.lockutils [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "refresh_cache-c45b2c35-e58a-4ffa-861a-980747e552a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1669.369168] env[62508]: DEBUG oslo_concurrency.lockutils [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "refresh_cache-c45b2c35-e58a-4ffa-861a-980747e552a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1669.369942] env[62508]: DEBUG nova.network.neutron [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1669.377821] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1669.377821] env[62508]: value = "task-1776305" [ 1669.377821] env[62508]: _type = "Task" [ 1669.377821] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.385737] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776302, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.248106} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.386408] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1669.386766] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1669.386975] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1669.394056] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776305, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.471286] env[62508]: DEBUG oslo_vmware.api [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Task: {'id': task-1776303, 'name': PowerOffVM_Task, 'duration_secs': 0.200041} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.471564] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1669.471758] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Volume detach. 
Driver type: vmdk {{(pid=62508) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1669.471956] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368657', 'volume_id': 'aea26e09-c771-424f-b682-9ea0232aedf2', 'name': 'volume-aea26e09-c771-424f-b682-9ea0232aedf2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f307d4d5-e877-4d0a-951c-779c1d2e573b', 'attached_at': '', 'detached_at': '', 'volume_id': 'aea26e09-c771-424f-b682-9ea0232aedf2', 'serial': 'aea26e09-c771-424f-b682-9ea0232aedf2'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1669.473078] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90e7bd8-fa48-4076-ab20-905c12c1d0e5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.478464] env[62508]: DEBUG nova.network.neutron [None req-40cc4109-57ae-48fa-8de9-54003edcb486 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Updating instance_info_cache with network_info: [{"id": "d993966a-9d82-49a0-9e8a-f1835407ecad", "address": "fa:16:3e:62:45:86", "network": {"id": "a60137fc-6bcc-47ca-a062-ff0c72ec6801", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1168729734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2b18b2f1d6d439382b9e0fc89b2a3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1712475b-e1be-49e0-9a18-febd305c90ad", "external-id": "nsx-vlan-transportzone-531", "segmentation_id": 531, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd993966a-9d", "ovs_interfaceid": "d993966a-9d82-49a0-9e8a-f1835407ecad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1669.496600] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b6b7c5-1b0a-4550-a7e2-2e348419ec82 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.505060] env[62508]: DEBUG nova.compute.utils [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:238}} [ 1669.510266] env[62508]: DEBUG nova.compute.manager [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1669.510934] env[62508]: DEBUG nova.network.neutron [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1669.522169] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df1bd129-ae9e-494c-a444-e2a4ce7f0a69 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.546619] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b4d61a-298c-46ff-86ee-503b5394bbc6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.551988] env[62508]: DEBUG nova.policy [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '466fd3a805b24749b134fe7977a5ac86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e90ec7156574be597a12f4fa0e8c1dc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1669.568466] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] The volume has not been displaced from its original location: [datastore1] volume-aea26e09-c771-424f-b682-9ea0232aedf2/volume-aea26e09-c771-424f-b682-9ea0232aedf2.vmdk. No consolidation needed. 
{{(pid=62508) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1669.574768] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Reconfiguring VM instance instance-00000034 to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1669.578104] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aeffef31-89b7-4143-8956-5a836c98277c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.604210] env[62508]: DEBUG oslo_vmware.api [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Waiting for the task: (returnval){ [ 1669.604210] env[62508]: value = "task-1776306" [ 1669.604210] env[62508]: _type = "Task" [ 1669.604210] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.616778] env[62508]: DEBUG oslo_vmware.api [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Task: {'id': task-1776306, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.698584] env[62508]: DEBUG nova.compute.manager [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Received event network-changed-d993966a-9d82-49a0-9e8a-f1835407ecad {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1669.698757] env[62508]: DEBUG nova.compute.manager [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Refreshing instance network info cache due to event network-changed-d993966a-9d82-49a0-9e8a-f1835407ecad. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1669.698988] env[62508]: DEBUG oslo_concurrency.lockutils [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] Acquiring lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1669.886132] env[62508]: DEBUG nova.objects.instance [None req-b1be2b07-e7de-4575-a6e9-2bbdd8484c7f tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Lazy-loading 'flavor' on Instance uuid 6afa4e73-64b4-4b10-b598-433f0c22ecb3 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1669.899291] env[62508]: INFO nova.virt.block_device [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Booting with volume 9420607e-8de8-4e29-a868-732bd5be0158 at /dev/sdb [ 1669.900942] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776305, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.351496} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.901380] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1669.901558] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1669.902049] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1669.924208] env[62508]: DEBUG nova.network.neutron [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1669.931620] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46a50d8-5401-4c44-a346-a336176fe9b3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.941631] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-968c82b2-6c16-4193-ae76-1645e2d4e4e5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.981398] env[62508]: DEBUG oslo_concurrency.lockutils [None req-40cc4109-57ae-48fa-8de9-54003edcb486 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Releasing lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1669.981707] env[62508]: DEBUG nova.compute.manager [None req-40cc4109-57ae-48fa-8de9-54003edcb486 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Inject network info {{(pid=62508) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1669.982025] env[62508]: DEBUG nova.compute.manager [None req-40cc4109-57ae-48fa-8de9-54003edcb486 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] network_info to inject: |[{"id": "d993966a-9d82-49a0-9e8a-f1835407ecad", "address": "fa:16:3e:62:45:86", "network": {"id": "a60137fc-6bcc-47ca-a062-ff0c72ec6801", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1168729734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2b18b2f1d6d439382b9e0fc89b2a3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1712475b-e1be-49e0-9a18-febd305c90ad", "external-id": "nsx-vlan-transportzone-531", "segmentation_id": 531, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd993966a-9d", "ovs_interfaceid": "d993966a-9d82-49a0-9e8a-f1835407ecad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1669.987826] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-40cc4109-57ae-48fa-8de9-54003edcb486 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Reconfiguring VM instance to set the machine id {{(pid=62508) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1669.991331] env[62508]: DEBUG 
nova.network.neutron [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Successfully created port: b7c00805-5cf9-431e-95ea-ec950fedaa89 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1669.993757] env[62508]: DEBUG oslo_concurrency.lockutils [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] Acquired lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1669.993933] env[62508]: DEBUG nova.network.neutron [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Refreshing network info cache for port d993966a-9d82-49a0-9e8a-f1835407ecad {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1669.995393] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-337b1116-921d-48b1-8a2e-7e0be8433475 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.006131] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d5e6e0-51b9-4541-a2fd-deaaae0038c6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.008991] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-796c8421-6f3c-41d6-8048-d4757440b4e4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.013878] env[62508]: DEBUG nova.compute.manager [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1670.026784] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d8e54e1-fe0f-4bd4-9c0f-3eda7c4f990e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.032826] env[62508]: DEBUG oslo_vmware.api [None req-40cc4109-57ae-48fa-8de9-54003edcb486 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Waiting for the task: (returnval){ [ 1670.032826] env[62508]: value = "task-1776307" [ 1670.032826] env[62508]: _type = "Task" [ 1670.032826] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.036606] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612ef63a-ff57-403b-b133-af87d79e87bc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.070508] env[62508]: DEBUG nova.compute.provider_tree [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1670.081420] env[62508]: DEBUG oslo_vmware.api [None req-40cc4109-57ae-48fa-8de9-54003edcb486 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1776307, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.114494] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4506b54d-8866-4a65-b620-f7a4d7f7ee1e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.127407] env[62508]: DEBUG nova.compute.manager [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Stashing vm_state: stopped {{(pid=62508) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1670.144968] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquiring lock "73841c12-1ae9-46a5-bfe0-e0f82877667c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1670.145183] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "73841c12-1ae9-46a5-bfe0-e0f82877667c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1670.147094] env[62508]: DEBUG oslo_vmware.api [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Task: {'id': task-1776306, 'name': ReconfigVM_Task, 'duration_secs': 0.256553} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.148483] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Reconfigured VM instance instance-00000034 to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1670.160747] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6db33bd0-7dfc-4f78-b791-068fa935a332 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.179973] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05642bb1-f6f5-4347-936e-563358c7a05c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.204939] env[62508]: DEBUG oslo_vmware.api [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Waiting for the task: (returnval){ [ 1670.204939] env[62508]: value = "task-1776308" [ 1670.204939] env[62508]: _type = "Task" [ 1670.204939] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.232115] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a5495d6-78b7-4dcc-8fdb-977ffda1fb43 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.239034] env[62508]: DEBUG oslo_vmware.api [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Task: {'id': task-1776308, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.242672] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576f3925-80cc-4035-b66f-4de543b7be45 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.265285] env[62508]: DEBUG nova.virt.block_device [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Updating existing volume attachment record: cea27d58-f793-44b5-9217-207a6bb7e8e3 {{(pid=62508) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1670.280017] env[62508]: DEBUG nova.network.neutron [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Updating instance_info_cache with network_info: [{"id": "115a9f0a-d45f-4776-82fb-ff4d9b1c9fc9", "address": "fa:16:3e:b6:ae:81", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap115a9f0a-d4", "ovs_interfaceid": "115a9f0a-d45f-4776-82fb-ff4d9b1c9fc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1670.395044] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b1be2b07-e7de-4575-a6e9-2bbdd8484c7f tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Acquiring lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.422891] env[62508]: DEBUG nova.network.neutron [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Updated VIF entry in instance network info cache for port d993966a-9d82-49a0-9e8a-f1835407ecad. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1670.423365] env[62508]: DEBUG nova.network.neutron [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Updating instance_info_cache with network_info: [{"id": "d993966a-9d82-49a0-9e8a-f1835407ecad", "address": "fa:16:3e:62:45:86", "network": {"id": "a60137fc-6bcc-47ca-a062-ff0c72ec6801", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1168729734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2b18b2f1d6d439382b9e0fc89b2a3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1712475b-e1be-49e0-9a18-febd305c90ad", "external-id": "nsx-vlan-transportzone-531", "segmentation_id": 531, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd993966a-9d", "ovs_interfaceid": "d993966a-9d82-49a0-9e8a-f1835407ecad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1670.563464] env[62508]: DEBUG oslo_vmware.api [None req-40cc4109-57ae-48fa-8de9-54003edcb486 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1776307, 'name': ReconfigVM_Task, 'duration_secs': 0.211276} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.563464] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-40cc4109-57ae-48fa-8de9-54003edcb486 tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Reconfigured VM instance to set the machine id {{(pid=62508) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1670.597767] env[62508]: ERROR nova.scheduler.client.report [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] [req-1c9e6294-5843-4c09-9dca-2e8ffe1533a7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1c9e6294-5843-4c09-9dca-2e8ffe1533a7"}]} [ 1670.616770] env[62508]: DEBUG nova.scheduler.client.report [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1670.633512] env[62508]: DEBUG nova.scheduler.client.report [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1670.633846] env[62508]: DEBUG nova.compute.provider_tree [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1670.649436] env[62508]: DEBUG nova.compute.manager [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1670.653309] env[62508]: DEBUG oslo_concurrency.lockutils [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1670.654191] env[62508]: DEBUG nova.scheduler.client.report [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1670.675297] env[62508]: DEBUG nova.scheduler.client.report [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1670.719274] env[62508]: DEBUG oslo_vmware.api [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Task: {'id': task-1776308, 'name': ReconfigVM_Task, 'duration_secs': 0.194685} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.720323] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368657', 'volume_id': 'aea26e09-c771-424f-b682-9ea0232aedf2', 'name': 'volume-aea26e09-c771-424f-b682-9ea0232aedf2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f307d4d5-e877-4d0a-951c-779c1d2e573b', 'attached_at': '', 'detached_at': '', 'volume_id': 'aea26e09-c771-424f-b682-9ea0232aedf2', 'serial': 'aea26e09-c771-424f-b682-9ea0232aedf2'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1670.720323] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1670.720759] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187a8f31-5c59-44d8-a118-fbd2f63c3cc1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.734180] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1670.734180] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ce2d0f7-f73a-4a3d-8149-fda0865d9cd7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.786144] env[62508]: DEBUG oslo_concurrency.lockutils [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "refresh_cache-c45b2c35-e58a-4ffa-861a-980747e552a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1670.786504] env[62508]: DEBUG nova.compute.manager [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Instance network_info: |[{"id": "115a9f0a-d45f-4776-82fb-ff4d9b1c9fc9", "address": "fa:16:3e:b6:ae:81", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap115a9f0a-d4", "ovs_interfaceid": "115a9f0a-d45f-4776-82fb-ff4d9b1c9fc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1670.787226] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:ae:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '115a9f0a-d45f-4776-82fb-ff4d9b1c9fc9', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1670.797770] env[62508]: DEBUG oslo.service.loopingcall [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1670.801194] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1670.801552] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e4122537-dca0-48ae-be1c-42545402208b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.827035] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1670.827035] env[62508]: value = "task-1776310" [ 1670.827035] env[62508]: _type = "Task" [ 1670.827035] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.844023] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776310, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.851811] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1670.852104] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1670.852311] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Deleting the datastore file [datastore1] f307d4d5-e877-4d0a-951c-779c1d2e573b {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1670.853329] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d1ac736-1125-46dd-a55d-beb36571ff5f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.863215] env[62508]: DEBUG oslo_vmware.api [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Waiting for the task: (returnval){ [ 1670.863215] env[62508]: value = "task-1776311" [ 1670.863215] env[62508]: _type = "Task" [ 1670.863215] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.878673] env[62508]: DEBUG oslo_vmware.api [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Task: {'id': task-1776311, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.929507] env[62508]: DEBUG oslo_concurrency.lockutils [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] Releasing lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1670.931118] env[62508]: DEBUG nova.compute.manager [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Received event network-vif-plugged-115a9f0a-d45f-4776-82fb-ff4d9b1c9fc9 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1670.931118] env[62508]: DEBUG oslo_concurrency.lockutils [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] Acquiring lock "c45b2c35-e58a-4ffa-861a-980747e552a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1670.931118] env[62508]: DEBUG oslo_concurrency.lockutils [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] Lock "c45b2c35-e58a-4ffa-861a-980747e552a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1670.931118] env[62508]: DEBUG oslo_concurrency.lockutils [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] Lock "c45b2c35-e58a-4ffa-861a-980747e552a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.931118] env[62508]: DEBUG nova.compute.manager [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] No waiting events found dispatching network-vif-plugged-115a9f0a-d45f-4776-82fb-ff4d9b1c9fc9 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1670.931118] env[62508]: WARNING nova.compute.manager [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Received unexpected event network-vif-plugged-115a9f0a-d45f-4776-82fb-ff4d9b1c9fc9 for instance with vm_state building and task_state spawning. [ 1670.931453] env[62508]: DEBUG nova.compute.manager [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Received event network-changed-115a9f0a-d45f-4776-82fb-ff4d9b1c9fc9 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1670.931666] env[62508]: DEBUG nova.compute.manager [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Refreshing instance network info cache due to event network-changed-115a9f0a-d45f-4776-82fb-ff4d9b1c9fc9. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1670.932168] env[62508]: DEBUG oslo_concurrency.lockutils [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] Acquiring lock "refresh_cache-c45b2c35-e58a-4ffa-861a-980747e552a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.932404] env[62508]: DEBUG oslo_concurrency.lockutils [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] Acquired lock "refresh_cache-c45b2c35-e58a-4ffa-861a-980747e552a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1670.932620] env[62508]: DEBUG nova.network.neutron [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Refreshing network info cache for port 115a9f0a-d45f-4776-82fb-ff4d9b1c9fc9 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1670.939721] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b1be2b07-e7de-4575-a6e9-2bbdd8484c7f tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Acquired lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1670.950281] env[62508]: DEBUG nova.virt.hardware [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1670.950559] env[62508]: DEBUG nova.virt.hardware [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1670.950731] env[62508]: DEBUG nova.virt.hardware [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1670.951702] env[62508]: DEBUG nova.virt.hardware [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1670.951702] env[62508]: DEBUG nova.virt.hardware [None 
req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1670.951702] env[62508]: DEBUG nova.virt.hardware [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1670.951927] env[62508]: DEBUG nova.virt.hardware [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1670.952171] env[62508]: DEBUG nova.virt.hardware [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1670.952438] env[62508]: DEBUG nova.virt.hardware [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1670.952685] env[62508]: DEBUG nova.virt.hardware [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1670.953273] env[62508]: DEBUG nova.virt.hardware [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1670.955642] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212ad4a7-c1b4-4f85-9b5e-a1a2dd618dc6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.970586] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3747d837-d29d-462b-8911-65810e815884 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.995161] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:82:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8570ede3-d3fc-41d9-90a0-3dc1ef777446', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1671.004343] env[62508]: DEBUG oslo.service.loopingcall [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1671.012101] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1671.012382] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b6054708-51ea-4ae0-829d-3cc754eca589 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.032544] env[62508]: DEBUG nova.compute.manager [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1671.043740] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1671.043740] env[62508]: value = "task-1776312" [ 1671.043740] env[62508]: _type = "Task" [ 1671.043740] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.053711] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776312, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.068369] env[62508]: DEBUG nova.virt.hardware [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1671.068700] env[62508]: DEBUG nova.virt.hardware [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1671.068848] env[62508]: DEBUG nova.virt.hardware [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1671.069348] env[62508]: DEBUG nova.virt.hardware 
[None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1671.069348] env[62508]: DEBUG nova.virt.hardware [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1671.069501] env[62508]: DEBUG nova.virt.hardware [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1671.069696] env[62508]: DEBUG nova.virt.hardware [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1671.069918] env[62508]: DEBUG nova.virt.hardware [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1671.070059] env[62508]: DEBUG nova.virt.hardware [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1671.070307] env[62508]: DEBUG nova.virt.hardware [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1671.070522] env[62508]: DEBUG nova.virt.hardware [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1671.073835] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16c19f7-c46f-4a9e-a2ec-ecbfd486d030 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.086878] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6703f7e2-a7ab-49ce-a78f-1dac84226788 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.140532] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34f973e-0085-434c-8a18-6c96bf1bedb2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.151964] env[62508]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e1d2ff3-da05-4e76-8cb7-f3dc1dcd8e68 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.211718] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1671.213118] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c708fc-a228-4b5a-a588-5cec26854bc3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.227881] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a77010b6-acb1-4285-a95d-e92411466ec0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.254483] env[62508]: DEBUG nova.compute.provider_tree [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1671.342089] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776310, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.373785] env[62508]: DEBUG oslo_vmware.api [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Task: {'id': task-1776311, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.281419} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.373785] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1671.373785] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1671.373785] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1671.373940] env[62508]: INFO nova.compute.manager [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Took 2.43 seconds to destroy the instance on the hypervisor. [ 1671.374196] env[62508]: DEBUG oslo.service.loopingcall [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1671.374391] env[62508]: DEBUG nova.compute.manager [-] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1671.374482] env[62508]: DEBUG nova.network.neutron [-] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1671.458963] env[62508]: DEBUG nova.network.neutron [None req-b1be2b07-e7de-4575-a6e9-2bbdd8484c7f tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1671.554714] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776312, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.731481] env[62508]: DEBUG nova.compute.manager [req-e49008db-4d7f-40da-813a-bba785faf414 req-3ed328b9-cc73-4ae2-b96c-337bf6837c0d service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Received event network-changed-d993966a-9d82-49a0-9e8a-f1835407ecad {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1671.731786] env[62508]: DEBUG nova.compute.manager [req-e49008db-4d7f-40da-813a-bba785faf414 req-3ed328b9-cc73-4ae2-b96c-337bf6837c0d service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Refreshing instance network info cache due to event network-changed-d993966a-9d82-49a0-9e8a-f1835407ecad. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1671.732036] env[62508]: DEBUG oslo_concurrency.lockutils [req-e49008db-4d7f-40da-813a-bba785faf414 req-3ed328b9-cc73-4ae2-b96c-337bf6837c0d service nova] Acquiring lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1671.753259] env[62508]: DEBUG nova.compute.manager [req-83916f87-8138-4404-b4af-0acf3b248f9c req-caa4d13f-8ef7-4a35-b751-9347f737719f service nova] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Received event network-vif-plugged-b7c00805-5cf9-431e-95ea-ec950fedaa89 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1671.753500] env[62508]: DEBUG oslo_concurrency.lockutils [req-83916f87-8138-4404-b4af-0acf3b248f9c req-caa4d13f-8ef7-4a35-b751-9347f737719f service nova] Acquiring lock "5b3dd9d0-7f30-45c2-931a-ce7175820710-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1671.753821] env[62508]: DEBUG oslo_concurrency.lockutils [req-83916f87-8138-4404-b4af-0acf3b248f9c req-caa4d13f-8ef7-4a35-b751-9347f737719f service nova] Lock "5b3dd9d0-7f30-45c2-931a-ce7175820710-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1671.753895] env[62508]: DEBUG oslo_concurrency.lockutils [req-83916f87-8138-4404-b4af-0acf3b248f9c req-caa4d13f-8ef7-4a35-b751-9347f737719f service nova] Lock "5b3dd9d0-7f30-45c2-931a-ce7175820710-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.754034] env[62508]: DEBUG nova.compute.manager [req-83916f87-8138-4404-b4af-0acf3b248f9c req-caa4d13f-8ef7-4a35-b751-9347f737719f service nova] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] No waiting events found dispatching network-vif-plugged-b7c00805-5cf9-431e-95ea-ec950fedaa89 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1671.754226] env[62508]: WARNING nova.compute.manager [req-83916f87-8138-4404-b4af-0acf3b248f9c req-caa4d13f-8ef7-4a35-b751-9347f737719f service nova] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Received unexpected event network-vif-plugged-b7c00805-5cf9-431e-95ea-ec950fedaa89 for instance with vm_state building and task_state spawning. 
[ 1671.796619] env[62508]: DEBUG nova.scheduler.client.report [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 125 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1671.797107] env[62508]: DEBUG nova.compute.provider_tree [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 125 to 126 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1671.797419] env[62508]: DEBUG nova.compute.provider_tree [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1671.840758] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776310, 'name': CreateVM_Task, 'duration_secs': 0.930898} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.841618] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1671.842712] env[62508]: DEBUG oslo_concurrency.lockutils [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1671.843025] env[62508]: DEBUG oslo_concurrency.lockutils [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1671.843523] env[62508]: DEBUG oslo_concurrency.lockutils [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1671.843910] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49e87a62-c076-4096-a74e-f8dcdcc99688 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.852561] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1671.852561] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b4a666-9081-55b2-f977-9f8c02244841" [ 1671.852561] env[62508]: _type = "Task" [ 1671.852561] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.864022] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b4a666-9081-55b2-f977-9f8c02244841, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.929894] env[62508]: DEBUG nova.network.neutron [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Updated VIF entry in instance network info cache for port 115a9f0a-d45f-4776-82fb-ff4d9b1c9fc9. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1671.930327] env[62508]: DEBUG nova.network.neutron [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Updating instance_info_cache with network_info: [{"id": "115a9f0a-d45f-4776-82fb-ff4d9b1c9fc9", "address": "fa:16:3e:b6:ae:81", "network": {"id": "fea2205c-12b6-4c9d-8762-d76de8d1014d", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1442878417-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0156fba01363470eaa9771d5f296f730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap115a9f0a-d4", "ovs_interfaceid": "115a9f0a-d45f-4776-82fb-ff4d9b1c9fc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1672.056645] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776312, 'name': CreateVM_Task, 'duration_secs': 0.867029} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.058066] env[62508]: DEBUG nova.network.neutron [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Successfully updated port: b7c00805-5cf9-431e-95ea-ec950fedaa89 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1672.060435] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1672.061595] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1672.305266] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.307s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1672.308247] env[62508]: DEBUG nova.network.neutron [None req-b1be2b07-e7de-4575-a6e9-2bbdd8484c7f tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 
6afa4e73-64b4-4b10-b598-433f0c22ecb3] Updating instance_info_cache with network_info: [{"id": "d993966a-9d82-49a0-9e8a-f1835407ecad", "address": "fa:16:3e:62:45:86", "network": {"id": "a60137fc-6bcc-47ca-a062-ff0c72ec6801", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1168729734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2b18b2f1d6d439382b9e0fc89b2a3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1712475b-e1be-49e0-9a18-febd305c90ad", "external-id": "nsx-vlan-transportzone-531", "segmentation_id": 531, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd993966a-9d", "ovs_interfaceid": "d993966a-9d82-49a0-9e8a-f1835407ecad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1672.312481] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 3.811s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1672.312688] env[62508]: DEBUG nova.objects.instance [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62508) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1672.331699] env[62508]: INFO nova.scheduler.client.report [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Deleted allocations for instance e07ab22e-bd07-4232-abfe-c0617c0b9813 [ 1672.365984] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b4a666-9081-55b2-f977-9f8c02244841, 'name': SearchDatastore_Task, 'duration_secs': 0.017569} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.366049] env[62508]: DEBUG oslo_concurrency.lockutils [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.366335] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1672.366537] env[62508]: DEBUG oslo_concurrency.lockutils [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1672.366691] env[62508]: DEBUG oslo_concurrency.lockutils [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1672.366982] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1672.367299] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1672.367629] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1672.367979] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6911d920-6398-48fc-a513-4a1b4faed60a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.370367] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd6f1335-4a09-426c-9af8-6daa68463429 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.377365] env[62508]: DEBUG 
oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1672.377365] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524f61b2-7461-9ffb-3fba-8b345c3f3ca3" [ 1672.377365] env[62508]: _type = "Task" [ 1672.377365] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.383055] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1672.383055] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1672.386909] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a5fffc9-e120-4343-9675-bbd3efa6f03a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.389297] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524f61b2-7461-9ffb-3fba-8b345c3f3ca3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.396273] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1672.396273] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52330dd1-dfe1-8fec-f86e-296183312223" [ 1672.396273] env[62508]: _type = "Task" [ 1672.396273] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.399032] env[62508]: DEBUG nova.network.neutron [-] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1672.409398] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52330dd1-dfe1-8fec-f86e-296183312223, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.426167] env[62508]: DEBUG nova.virt.hardware [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1672.426429] env[62508]: DEBUG nova.virt.hardware [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1672.426591] env[62508]: DEBUG nova.virt.hardware [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1672.426797] env[62508]: DEBUG nova.virt.hardware [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1672.426954] env[62508]: DEBUG nova.virt.hardware [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1672.427118] env[62508]: DEBUG nova.virt.hardware [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1672.427328] env[62508]: DEBUG nova.virt.hardware [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1672.427492] env[62508]: DEBUG nova.virt.hardware [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1672.427660] 
env[62508]: DEBUG nova.virt.hardware [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1672.427854] env[62508]: DEBUG nova.virt.hardware [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1672.428053] env[62508]: DEBUG nova.virt.hardware [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1672.428900] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3753195d-ed7c-4a94-b1cf-b38ce26ef841 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.432672] env[62508]: DEBUG oslo_concurrency.lockutils [req-cc8cfdf8-11de-40a3-a098-0c9cbd84f8ec req-e9fd7c93-b41f-4faa-8cb7-7a56385f2fc2 service nova] Releasing lock "refresh_cache-c45b2c35-e58a-4ffa-861a-980747e552a1" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.437948] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9025bc-b19c-4a06-9502-d63fc4cf1cf9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.451695] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:42:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '304be4f7-4e36-4468-9ef4-e457341cef18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec2a5bd1-b682-40fe-825d-7029eb22f70e', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1672.459205] env[62508]: DEBUG oslo.service.loopingcall [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1672.459447] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1672.459660] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-155b88f3-292c-46b0-aa1c-c0f8e82d364d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.482740] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1672.482740] env[62508]: value = "task-1776313" [ 1672.482740] env[62508]: _type = "Task" [ 1672.482740] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.493494] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776313, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.562273] env[62508]: DEBUG oslo_concurrency.lockutils [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "refresh_cache-5b3dd9d0-7f30-45c2-931a-ce7175820710" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1672.562273] env[62508]: DEBUG oslo_concurrency.lockutils [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "refresh_cache-5b3dd9d0-7f30-45c2-931a-ce7175820710" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1672.562273] env[62508]: DEBUG nova.network.neutron [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1672.820219] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b1be2b07-e7de-4575-a6e9-2bbdd8484c7f tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Releasing lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.820495] env[62508]: DEBUG nova.compute.manager [None req-b1be2b07-e7de-4575-a6e9-2bbdd8484c7f tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Inject network info {{(pid=62508) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1672.820839] env[62508]: DEBUG nova.compute.manager [None req-b1be2b07-e7de-4575-a6e9-2bbdd8484c7f tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] network_info to inject: |[{"id": "d993966a-9d82-49a0-9e8a-f1835407ecad", "address": "fa:16:3e:62:45:86", "network": {"id": "a60137fc-6bcc-47ca-a062-ff0c72ec6801", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1168729734-network", "subnets": [{"cidr": "192.168.128.0/28", 
"dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2b18b2f1d6d439382b9e0fc89b2a3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1712475b-e1be-49e0-9a18-febd305c90ad", "external-id": "nsx-vlan-transportzone-531", "segmentation_id": 531, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd993966a-9d", "ovs_interfaceid": "d993966a-9d82-49a0-9e8a-f1835407ecad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1672.826457] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b1be2b07-e7de-4575-a6e9-2bbdd8484c7f tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Reconfiguring VM instance to set the machine id {{(pid=62508) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1672.827056] env[62508]: DEBUG oslo_concurrency.lockutils [req-e49008db-4d7f-40da-813a-bba785faf414 req-3ed328b9-cc73-4ae2-b96c-337bf6837c0d service nova] Acquired lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1672.827233] env[62508]: DEBUG nova.network.neutron [req-e49008db-4d7f-40da-813a-bba785faf414 req-3ed328b9-cc73-4ae2-b96c-337bf6837c0d service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Refreshing network info cache for port d993966a-9d82-49a0-9e8a-f1835407ecad {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1672.828299] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df1545b8-e91d-452e-96be-247e7a442a21 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.842675] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6d079f50-0924-4881-a869-2f84dd829dea tempest-ServersWithSpecificFlavorTestJSON-1519310017 tempest-ServersWithSpecificFlavorTestJSON-1519310017-project-member] Lock "e07ab22e-bd07-4232-abfe-c0617c0b9813" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 9.795s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1672.848183] env[62508]: DEBUG oslo_vmware.api [None req-b1be2b07-e7de-4575-a6e9-2bbdd8484c7f tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Waiting for the task: (returnval){ [ 1672.848183] env[62508]: value = "task-1776314" [ 1672.848183] env[62508]: _type = "Task" [ 1672.848183] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.858612] env[62508]: DEBUG oslo_vmware.api [None req-b1be2b07-e7de-4575-a6e9-2bbdd8484c7f tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1776314, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.888657] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524f61b2-7461-9ffb-3fba-8b345c3f3ca3, 'name': SearchDatastore_Task, 'duration_secs': 0.025869} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.888968] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.889216] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1672.889428] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1672.899454] env[62508]: INFO nova.compute.manager [-] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Took 1.52 seconds to deallocate network for instance. [ 1672.913891] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52330dd1-dfe1-8fec-f86e-296183312223, 'name': SearchDatastore_Task, 'duration_secs': 0.015536} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.914729] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae7bb794-9ead-411e-ac0f-aad44556ed57 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.920989] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1672.920989] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521fc53b-b1ba-2b69-1481-91ca4c2fd933" [ 1672.920989] env[62508]: _type = "Task" [ 1672.920989] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.930431] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521fc53b-b1ba-2b69-1481-91ca4c2fd933, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.993808] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776313, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.099402] env[62508]: DEBUG nova.network.neutron [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1673.252497] env[62508]: DEBUG nova.network.neutron [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Updating instance_info_cache with network_info: [{"id": "b7c00805-5cf9-431e-95ea-ec950fedaa89", "address": "fa:16:3e:bf:8b:d8", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7c00805-5c", "ovs_interfaceid": "b7c00805-5cf9-431e-95ea-ec950fedaa89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1673.328572] env[62508]: DEBUG oslo_concurrency.lockutils [None req-95377de6-66af-4d8e-950c-f2945e471be9 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1673.330142] env[62508]: DEBUG oslo_concurrency.lockutils [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.677s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1673.359524] env[62508]: DEBUG oslo_vmware.api [None req-b1be2b07-e7de-4575-a6e9-2bbdd8484c7f tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1776314, 'name': ReconfigVM_Task, 'duration_secs': 0.156141} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.359801] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b1be2b07-e7de-4575-a6e9-2bbdd8484c7f tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Reconfigured VM instance to set the machine id {{(pid=62508) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1673.434924] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521fc53b-b1ba-2b69-1481-91ca4c2fd933, 'name': SearchDatastore_Task, 'duration_secs': 0.011644} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.434924] env[62508]: DEBUG oslo_concurrency.lockutils [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1673.434924] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] c45b2c35-e58a-4ffa-861a-980747e552a1/c45b2c35-e58a-4ffa-861a-980747e552a1.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1673.434924] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1673.435145] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1673.435353] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c267c26-42e4-46eb-893b-559c55c9e225 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.437796] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-10a3f71f-9e26-4bfa-92cb-2ba891cdd1fa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.446623] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1673.446623] env[62508]: value = "task-1776315" [ 1673.446623] env[62508]: _type = "Task" [ 1673.446623] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.454026] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1673.454026] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1673.454026] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-726cd027-0168-41e1-b83c-f65f5402dc17 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.457892] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776315, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.458787] env[62508]: INFO nova.compute.manager [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Took 0.56 seconds to detach 1 volumes for instance. [ 1673.461233] env[62508]: DEBUG nova.compute.manager [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Deleting volume: aea26e09-c771-424f-b682-9ea0232aedf2 {{(pid=62508) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1673.464344] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1673.464344] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52aa20ae-6aed-1d28-523b-76f45dfa27fe" [ 1673.464344] env[62508]: _type = "Task" [ 1673.464344] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.475695] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52aa20ae-6aed-1d28-523b-76f45dfa27fe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.493373] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776313, 'name': CreateVM_Task, 'duration_secs': 0.6123} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.494540] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1673.494540] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1673.494540] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1673.494942] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1673.495285] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73e02622-1358-4707-8e57-24be71ca1a6b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.501597] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1673.501597] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52aaad74-2a6c-bb83-7d5c-5ce1a23885de" [ 1673.501597] env[62508]: _type = "Task" [ 1673.501597] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.511158] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52aaad74-2a6c-bb83-7d5c-5ce1a23885de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.575775] env[62508]: DEBUG nova.network.neutron [req-e49008db-4d7f-40da-813a-bba785faf414 req-3ed328b9-cc73-4ae2-b96c-337bf6837c0d service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Updated VIF entry in instance network info cache for port d993966a-9d82-49a0-9e8a-f1835407ecad. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1673.576158] env[62508]: DEBUG nova.network.neutron [req-e49008db-4d7f-40da-813a-bba785faf414 req-3ed328b9-cc73-4ae2-b96c-337bf6837c0d service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Updating instance_info_cache with network_info: [{"id": "d993966a-9d82-49a0-9e8a-f1835407ecad", "address": "fa:16:3e:62:45:86", "network": {"id": "a60137fc-6bcc-47ca-a062-ff0c72ec6801", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1168729734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2b18b2f1d6d439382b9e0fc89b2a3aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1712475b-e1be-49e0-9a18-febd305c90ad", "external-id": "nsx-vlan-transportzone-531", "segmentation_id": 531, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd993966a-9d", "ovs_interfaceid": "d993966a-9d82-49a0-9e8a-f1835407ecad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1673.670783] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Acquiring lock "6afa4e73-64b4-4b10-b598-433f0c22ecb3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.671144] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Lock "6afa4e73-64b4-4b10-b598-433f0c22ecb3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1673.671305] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Acquiring lock "6afa4e73-64b4-4b10-b598-433f0c22ecb3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.671465] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Lock "6afa4e73-64b4-4b10-b598-433f0c22ecb3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1673.671640] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Lock "6afa4e73-64b4-4b10-b598-433f0c22ecb3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1673.675055] env[62508]: INFO nova.compute.manager [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Terminating instance [ 1673.676903] env[62508]: DEBUG nova.compute.manager [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1673.677125] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1673.678018] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee81024-fe1c-4629-a868-b8d9731b86ae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.688758] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1673.689089] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8b646d1-d660-4916-8e78-d0f1111387c9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.700028] env[62508]: DEBUG oslo_vmware.api [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Waiting for the task: (returnval){ [ 1673.700028] env[62508]: value = "task-1776317" [ 1673.700028] env[62508]: _type = "Task" [ 1673.700028] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.711981] env[62508]: DEBUG oslo_vmware.api [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1776317, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.755770] env[62508]: DEBUG oslo_concurrency.lockutils [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "refresh_cache-5b3dd9d0-7f30-45c2-931a-ce7175820710" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1673.756054] env[62508]: DEBUG nova.compute.manager [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Instance network_info: |[{"id": "b7c00805-5cf9-431e-95ea-ec950fedaa89", "address": "fa:16:3e:bf:8b:d8", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7c00805-5c", "ovs_interfaceid": "b7c00805-5cf9-431e-95ea-ec950fedaa89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1673.757571] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:8b:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b7c00805-5cf9-431e-95ea-ec950fedaa89', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1673.765929] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Creating folder: Project (6e90ec7156574be597a12f4fa0e8c1dc). Parent ref: group-v368536. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1673.767402] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88e995c5-6574-46ee-8695-e7e9e6a0ce45 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.772275] env[62508]: DEBUG nova.compute.manager [req-3c7d519a-b996-4ef8-af82-bf17c36567fc req-7aae2d6b-ba2c-43c4-a2d4-be0e61c3ca0a service nova] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Received event network-vif-deleted-c34d40a3-404e-4f74-8cdc-ba8732e8c103 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1673.786220] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Created folder: Project (6e90ec7156574be597a12f4fa0e8c1dc) in parent group-v368536. [ 1673.786220] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Creating folder: Instances. Parent ref: group-v368771. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1673.786220] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b17267e6-877c-486d-a5cf-473118a8e344 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.796240] env[62508]: DEBUG nova.compute.manager [req-7161ddf5-db69-4b2d-a74b-80896a478bd9 req-760f206d-cdb0-40d0-9812-6deca7ec879b service nova] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Received event network-changed-b7c00805-5cf9-431e-95ea-ec950fedaa89 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1673.796240] env[62508]: DEBUG nova.compute.manager [req-7161ddf5-db69-4b2d-a74b-80896a478bd9 req-760f206d-cdb0-40d0-9812-6deca7ec879b service nova] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Refreshing instance network info cache due to event network-changed-b7c00805-5cf9-431e-95ea-ec950fedaa89. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1673.796403] env[62508]: DEBUG oslo_concurrency.lockutils [req-7161ddf5-db69-4b2d-a74b-80896a478bd9 req-760f206d-cdb0-40d0-9812-6deca7ec879b service nova] Acquiring lock "refresh_cache-5b3dd9d0-7f30-45c2-931a-ce7175820710" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1673.796403] env[62508]: DEBUG oslo_concurrency.lockutils [req-7161ddf5-db69-4b2d-a74b-80896a478bd9 req-760f206d-cdb0-40d0-9812-6deca7ec879b service nova] Acquired lock "refresh_cache-5b3dd9d0-7f30-45c2-931a-ce7175820710" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1673.796818] env[62508]: DEBUG nova.network.neutron [req-7161ddf5-db69-4b2d-a74b-80896a478bd9 req-760f206d-cdb0-40d0-9812-6deca7ec879b service nova] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Refreshing network info cache for port b7c00805-5cf9-431e-95ea-ec950fedaa89 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1673.804540] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Created folder: Instances in parent group-v368771. [ 1673.804540] env[62508]: DEBUG oslo.service.loopingcall [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1673.804540] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1673.804540] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20e38161-0c8b-4caa-be3c-cfbe9f76da47 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.829551] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1673.829551] env[62508]: value = "task-1776320" [ 1673.829551] env[62508]: _type = "Task" [ 1673.829551] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.837772] env[62508]: INFO nova.compute.claims [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1673.851327] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776320, 'name': CreateVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.959199] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776315, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.977048] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52aa20ae-6aed-1d28-523b-76f45dfa27fe, 'name': SearchDatastore_Task, 'duration_secs': 0.01564} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.977498] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8229be58-87f8-487e-93b4-061c1688c307 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.984522] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1673.984522] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528c1c46-4705-0cc6-c885-2288ecb86a23" [ 1673.984522] env[62508]: _type = "Task" [ 1673.984522] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.993130] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528c1c46-4705-0cc6-c885-2288ecb86a23, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.011917] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52aaad74-2a6c-bb83-7d5c-5ce1a23885de, 'name': SearchDatastore_Task, 'duration_secs': 0.014397} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.012274] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.012588] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1674.012722] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.016926] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.079026] env[62508]: DEBUG oslo_concurrency.lockutils [req-e49008db-4d7f-40da-813a-bba785faf414 req-3ed328b9-cc73-4ae2-b96c-337bf6837c0d service nova] Releasing lock "refresh_cache-6afa4e73-64b4-4b10-b598-433f0c22ecb3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.210506] env[62508]: DEBUG oslo_vmware.api [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1776317, 'name': PowerOffVM_Task, 'duration_secs': 0.503904} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.210833] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1674.211034] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1674.211301] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dda898c5-cd6e-48b0-b50d-c62ad3aa4124 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.341883] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1674.347313] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1674.347313] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Deleting the datastore file [datastore1] 6afa4e73-64b4-4b10-b598-433f0c22ecb3 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1674.354482] env[62508]: INFO nova.compute.resource_tracker [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Updating resource usage from migration 3dac5208-b604-4108-b35a-0de7840810dc [ 1674.358800] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97aaa335-2aba-4192-abc9-970854da4360 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.360781] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776320, 'name': CreateVM_Task, 'duration_secs': 0.48197} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.363142] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1674.364213] env[62508]: DEBUG oslo_concurrency.lockutils [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.364330] env[62508]: DEBUG oslo_concurrency.lockutils [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.364648] env[62508]: DEBUG oslo_concurrency.lockutils [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1674.365258] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b3a7f01-f8d3-4d54-9a65-f72be58d885d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.369991] env[62508]: DEBUG oslo_vmware.api [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Waiting for the task: (returnval){ [ 1674.369991] env[62508]: value = "task-1776322" [ 1674.369991] env[62508]: _type = "Task" [ 1674.369991] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.371638] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1674.371638] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f977a7-e283-1bc9-3e15-7b133d3194fa" [ 1674.371638] env[62508]: _type = "Task" [ 1674.371638] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.384579] env[62508]: DEBUG oslo_vmware.api [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1776322, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.391029] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f977a7-e283-1bc9-3e15-7b133d3194fa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.463127] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776315, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.589269} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.466022] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] c45b2c35-e58a-4ffa-861a-980747e552a1/c45b2c35-e58a-4ffa-861a-980747e552a1.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1674.466022] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1674.466022] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-af4d12aa-d0d9-4e06-8a10-8f4df8a648bc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.481414] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1674.481414] env[62508]: value = "task-1776323" [ 1674.481414] env[62508]: _type = "Task" [ 1674.481414] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.506444] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776323, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.513478] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528c1c46-4705-0cc6-c885-2288ecb86a23, 'name': SearchDatastore_Task, 'duration_secs': 0.032783} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.516882] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.517179] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] bee2cc61-b26c-4d2d-a2aa-ec79b8678e32/bee2cc61-b26c-4d2d-a2aa-ec79b8678e32.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1674.517504] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.517814] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1674.518170] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-37768899-8433-4763-9972-90cb318ca312 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.521899] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d082bc6-4fa6-49c7-a79d-64051652de3f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.531664] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1674.531664] env[62508]: value = "task-1776324" [ 1674.531664] env[62508]: _type = "Task" [ 1674.531664] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.542745] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1674.542745] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1674.545079] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aedef9e8-0549-4cd8-bc58-95ac92dc5c1f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.553800] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776324, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.558762] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1674.558762] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523d766a-6978-20f1-1686-582d8e326443" [ 1674.558762] env[62508]: _type = "Task" [ 1674.558762] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.573630] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523d766a-6978-20f1-1686-582d8e326443, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.647316] env[62508]: DEBUG nova.network.neutron [req-7161ddf5-db69-4b2d-a74b-80896a478bd9 req-760f206d-cdb0-40d0-9812-6deca7ec879b service nova] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Updated VIF entry in instance network info cache for port b7c00805-5cf9-431e-95ea-ec950fedaa89. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1674.647622] env[62508]: DEBUG nova.network.neutron [req-7161ddf5-db69-4b2d-a74b-80896a478bd9 req-760f206d-cdb0-40d0-9812-6deca7ec879b service nova] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Updating instance_info_cache with network_info: [{"id": "b7c00805-5cf9-431e-95ea-ec950fedaa89", "address": "fa:16:3e:bf:8b:d8", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7c00805-5c", "ovs_interfaceid": "b7c00805-5cf9-431e-95ea-ec950fedaa89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1674.751860] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c188e58-74f0-4aff-b18f-282aa4eb986b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.761285] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ad01be-d842-4649-9b85-fa09103f0734 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.793879] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7b62aaf-e9a1-4d46-a51c-cc8571de8143 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.803091] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c16dc9-768f-416c-a5ec-391e49a9e3cd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.819462] env[62508]: DEBUG nova.compute.provider_tree [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1674.890640] env[62508]: DEBUG oslo_vmware.api [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1776322, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.895372] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f977a7-e283-1bc9-3e15-7b133d3194fa, 'name': SearchDatastore_Task, 'duration_secs': 0.022249} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.895635] env[62508]: DEBUG oslo_concurrency.lockutils [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.895873] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1674.896100] env[62508]: DEBUG oslo_concurrency.lockutils [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.995977] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776323, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.224061} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.996255] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1674.997069] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-134de7f6-6e5b-48a4-9fa7-a8b566747126 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.021973] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] c45b2c35-e58a-4ffa-861a-980747e552a1/c45b2c35-e58a-4ffa-861a-980747e552a1.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1675.022327] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7853d3e9-7177-471f-9396-e4ed59ccbf71 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.054715] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776324, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.055232] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1675.055232] env[62508]: value = "task-1776325" [ 1675.055232] env[62508]: _type = "Task" [ 1675.055232] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.064184] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776325, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.073917] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523d766a-6978-20f1-1686-582d8e326443, 'name': SearchDatastore_Task, 'duration_secs': 0.021686} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.074760] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e550add0-e808-4bb0-96c3-bc2f6ea23449 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.080998] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1675.080998] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5225c696-f708-0ae3-1e98-7bdcb9b7e069" [ 1675.080998] env[62508]: _type = "Task" [ 1675.080998] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.091121] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5225c696-f708-0ae3-1e98-7bdcb9b7e069, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.152447] env[62508]: DEBUG oslo_concurrency.lockutils [req-7161ddf5-db69-4b2d-a74b-80896a478bd9 req-760f206d-cdb0-40d0-9812-6deca7ec879b service nova] Releasing lock "refresh_cache-5b3dd9d0-7f30-45c2-931a-ce7175820710" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1675.323375] env[62508]: DEBUG nova.scheduler.client.report [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1675.385419] env[62508]: DEBUG oslo_vmware.api [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Task: {'id': task-1776322, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.649012} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.385745] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1675.385887] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1675.386102] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1675.386284] env[62508]: INFO nova.compute.manager [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Took 1.71 seconds to destroy the instance on the hypervisor. [ 1675.386695] env[62508]: DEBUG oslo.service.loopingcall [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1675.386919] env[62508]: DEBUG nova.compute.manager [-] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1675.387051] env[62508]: DEBUG nova.network.neutron [-] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1675.554439] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776324, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.936533} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.554439] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] bee2cc61-b26c-4d2d-a2aa-ec79b8678e32/bee2cc61-b26c-4d2d-a2aa-ec79b8678e32.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1675.554439] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1675.554439] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-557da958-727f-4d19-b571-cfc43df7ea1e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.560971] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1675.560971] env[62508]: value = "task-1776326" [ 1675.560971] env[62508]: _type = "Task" [ 1675.560971] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.568540] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776325, 'name': ReconfigVM_Task, 'duration_secs': 0.386904} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.568667] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Reconfigured VM instance instance-00000052 to attach disk [datastore1] c45b2c35-e58a-4ffa-861a-980747e552a1/c45b2c35-e58a-4ffa-861a-980747e552a1.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1675.570367] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7be97f62-e445-48e8-8c50-370916fcb434 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.574425] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776326, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.581091] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1675.581091] env[62508]: value = "task-1776327" [ 1675.581091] env[62508]: _type = "Task" [ 1675.581091] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.599417] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776327, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.603546] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5225c696-f708-0ae3-1e98-7bdcb9b7e069, 'name': SearchDatastore_Task, 'duration_secs': 0.014604} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.603816] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1675.604080] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 145306d7-f0e8-46c0-b2ab-1c41c208f976/145306d7-f0e8-46c0-b2ab-1c41c208f976.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1675.605473] env[62508]: DEBUG oslo_concurrency.lockutils [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.605473] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1675.605473] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e9d0be2-7ec4-4ad4-9d7d-db5d4c3d161e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.607190] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-61edee25-593f-410e-b273-cab1c01ceb18 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.617037] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1675.617037] env[62508]: value = "task-1776328" [ 1675.617037] env[62508]: _type = "Task" [ 1675.617037] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.623141] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1675.623350] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1675.627643] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-988bafbe-c7f3-4273-821a-b2d61ee5e466 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.634084] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776328, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.653119] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1675.653119] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525af0d5-73dd-cdf3-705c-13691c3f38a8" [ 1675.653119] env[62508]: _type = "Task" [ 1675.653119] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.664416] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525af0d5-73dd-cdf3-705c-13691c3f38a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.833491] env[62508]: DEBUG oslo_concurrency.lockutils [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.501s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.833491] env[62508]: INFO nova.compute.manager [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Migrating [ 1675.838903] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.627s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.840473] env[62508]: INFO nova.compute.claims [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1676.078713] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776326, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.092593] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776327, 'name': Rename_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.128496] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776328, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.164593] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525af0d5-73dd-cdf3-705c-13691c3f38a8, 'name': SearchDatastore_Task, 'duration_secs': 0.036323} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.166346] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc4e038c-ee41-45f9-b236-efb0742bea7d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.174023] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1676.174023] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526648da-bb90-5ee2-f549-6e0263445e31" [ 1676.174023] env[62508]: _type = "Task" [ 1676.174023] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.182712] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526648da-bb90-5ee2-f549-6e0263445e31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.228734] env[62508]: DEBUG nova.compute.manager [req-ae14e119-8936-41e9-9c70-30af9129fb1f req-802ee475-217f-4aff-82d3-9fe5c7049622 service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Received event network-vif-deleted-d993966a-9d82-49a0-9e8a-f1835407ecad {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1676.228877] env[62508]: INFO nova.compute.manager [req-ae14e119-8936-41e9-9c70-30af9129fb1f req-802ee475-217f-4aff-82d3-9fe5c7049622 service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Neutron deleted interface d993966a-9d82-49a0-9e8a-f1835407ecad; detaching it from the instance and deleting it from the info cache [ 1676.229376] env[62508]: DEBUG nova.network.neutron [req-ae14e119-8936-41e9-9c70-30af9129fb1f req-802ee475-217f-4aff-82d3-9fe5c7049622 service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1676.354093] env[62508]: DEBUG oslo_concurrency.lockutils [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1676.354665] env[62508]: DEBUG oslo_concurrency.lockutils [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1676.354665] env[62508]: DEBUG nova.network.neutron [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1676.575455] env[62508]: DEBUG 
oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776326, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.88761} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.576103] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1676.577242] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc0fd33-3c57-44cb-bb57-019f53c3ed1e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.602335] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] bee2cc61-b26c-4d2d-a2aa-ec79b8678e32/bee2cc61-b26c-4d2d-a2aa-ec79b8678e32.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1676.605778] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc32d8ce-d41a-444c-b13b-b2eb45ff3600 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.628071] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776327, 'name': Rename_Task, 'duration_secs': 0.934412} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.632053] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1676.632053] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b1a04f6-53fe-4594-8915-c4c25f864598 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.632507] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1676.632507] env[62508]: value = "task-1776329" [ 1676.632507] env[62508]: _type = "Task" [ 1676.632507] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.635905] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776328, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.626565} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.639641] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 145306d7-f0e8-46c0-b2ab-1c41c208f976/145306d7-f0e8-46c0-b2ab-1c41c208f976.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1676.640090] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1676.640784] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0aab0632-6b2e-4c40-8f18-415ad1690f2f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.655489] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776329, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.658486] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1676.658486] env[62508]: value = "task-1776330" [ 1676.658486] env[62508]: _type = "Task" [ 1676.658486] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.661781] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1676.661781] env[62508]: value = "task-1776331" [ 1676.661781] env[62508]: _type = "Task" [ 1676.661781] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.669919] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776330, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.676227] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776331, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.686482] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526648da-bb90-5ee2-f549-6e0263445e31, 'name': SearchDatastore_Task, 'duration_secs': 0.038479} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.686826] env[62508]: DEBUG oslo_concurrency.lockutils [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1676.687149] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 5b3dd9d0-7f30-45c2-931a-ce7175820710/5b3dd9d0-7f30-45c2-931a-ce7175820710.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1676.687454] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48d55d6a-f8e5-4cdb-93f0-8627b5fb1675 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.697292] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1676.697292] env[62508]: value = "task-1776332" [ 1676.697292] env[62508]: _type = "Task" [ 1676.697292] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.697580] env[62508]: DEBUG nova.network.neutron [-] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1676.710326] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776332, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.733954] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1463b1f0-db52-4a8d-bfb2-3ab1182c8413 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.745162] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56009098-42b2-4ee7-921d-371cd3990bc8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.791835] env[62508]: DEBUG nova.compute.manager [req-ae14e119-8936-41e9-9c70-30af9129fb1f req-802ee475-217f-4aff-82d3-9fe5c7049622 service nova] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Detach interface failed, port_id=d993966a-9d82-49a0-9e8a-f1835407ecad, reason: Instance 6afa4e73-64b4-4b10-b598-433f0c22ecb3 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1677.152440] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776329, 'name': ReconfigVM_Task, 'duration_secs': 0.415969} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.153048] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Reconfigured VM instance instance-00000049 to attach disk [datastore1] bee2cc61-b26c-4d2d-a2aa-ec79b8678e32/bee2cc61-b26c-4d2d-a2aa-ec79b8678e32.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1677.158316] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9af0e5c8-b925-4026-88fc-e7bdfeb81332 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.177273] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1677.177273] env[62508]: value = "task-1776333" [ 1677.177273] env[62508]: _type = "Task" [ 1677.177273] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.196597] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776331, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098847} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.197040] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776330, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.201742] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1677.203235] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f92aa54-c2cc-48d7-a6c9-4285aad6f7f0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.222455] env[62508]: INFO nova.compute.manager [-] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Took 1.84 seconds to deallocate network for instance. [ 1677.223088] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776333, 'name': Rename_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.232594] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776332, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.258130] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 145306d7-f0e8-46c0-b2ab-1c41c208f976/145306d7-f0e8-46c0-b2ab-1c41c208f976.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1677.259582] env[62508]: DEBUG nova.network.neutron [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Updating instance_info_cache with network_info: [{"id": "ec6adf65-fbdf-4276-8e19-eb416336bbff", "address": "fa:16:3e:82:3c:01", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec6adf65-fb", "ovs_interfaceid": "ec6adf65-fbdf-4276-8e19-eb416336bbff", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1677.263567] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-680edb7e-9692-4d01-af08-a00ab95e3d50 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.280740] env[62508]: DEBUG oslo_concurrency.lockutils [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1677.289556] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1677.289556] env[62508]: value = "task-1776334" [ 1677.289556] env[62508]: _type = "Task" [ 1677.289556] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.301141] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776334, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.380687] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db955a6b-09e3-41ee-8e93-9aa8a7d7a56c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.390820] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59975b5a-d262-449f-89b7-bb4d6a94eceb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.425128] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff7e10e-f194-4d1f-829a-959f3f8740e8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.435328] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6582b27b-b3e6-4068-a18e-28f838766fb3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.450459] env[62508]: DEBUG nova.compute.provider_tree [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1677.672407] env[62508]: DEBUG oslo_vmware.api [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776330, 'name': PowerOnVM_Task, 'duration_secs': 0.577901} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.672793] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1677.672925] env[62508]: INFO nova.compute.manager [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Took 9.40 seconds to spawn the instance on the hypervisor. [ 1677.673113] env[62508]: DEBUG nova.compute.manager [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1677.673985] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3784c06-aa29-4329-afaf-d0300453877f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.689923] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776333, 'name': Rename_Task, 'duration_secs': 0.179219} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.689923] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1677.690126] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-99ac266f-283b-48d4-8d2d-8b6e9b014f40 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.698266] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1677.698266] env[62508]: value = "task-1776335" [ 1677.698266] env[62508]: _type = "Task" [ 1677.698266] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.706207] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776335, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.715395] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776332, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.553358} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.715750] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 5b3dd9d0-7f30-45c2-931a-ce7175820710/5b3dd9d0-7f30-45c2-931a-ce7175820710.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1677.716132] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1677.716351] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ddfd3b9-8e8e-458a-bd04-595701465c1c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.724867] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1677.724867] env[62508]: value = "task-1776336" [ 1677.724867] env[62508]: _type = "Task" [ 1677.724867] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.735572] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776336, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.759819] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1677.800189] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776334, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.989024] env[62508]: DEBUG nova.scheduler.client.report [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 126 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1677.989181] env[62508]: DEBUG nova.compute.provider_tree [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 126 to 127 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1677.989312] env[62508]: DEBUG nova.compute.provider_tree [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1678.194701] env[62508]: INFO nova.compute.manager [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Took 15.18 seconds to build instance. [ 1678.211259] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776335, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.241361] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776336, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078757} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.241908] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1678.243218] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff84af17-519e-4a6b-96e4-de0d214140a2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.275506] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 5b3dd9d0-7f30-45c2-931a-ce7175820710/5b3dd9d0-7f30-45c2-931a-ce7175820710.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1678.275914] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3478219e-5e86-44fa-a7e0-9d4e630712d0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.305229] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776334, 'name': ReconfigVM_Task, 'duration_secs': 0.576914} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.306638] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 145306d7-f0e8-46c0-b2ab-1c41c208f976/145306d7-f0e8-46c0-b2ab-1c41c208f976.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1678.308348] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'size': 0, 'device_type': 'disk', 'boot_index': 0, 'encryption_options': None, 'device_name': '/dev/sda', 'disk_bus': None, 'guest_format': None, 'encryption_secret_uuid': None, 'encrypted': False, 'encryption_format': None, 'image_id': 'f81c384b-39f5-44b6-928f-ab9f4bc0a9f7'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'device_type': None, 'boot_index': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368760', 'volume_id': '9420607e-8de8-4e29-a868-732bd5be0158', 'name': 'volume-9420607e-8de8-4e29-a868-732bd5be0158', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '145306d7-f0e8-46c0-b2ab-1c41c208f976', 'attached_at': '', 'detached_at': '', 'volume_id': '9420607e-8de8-4e29-a868-732bd5be0158', 'serial': '9420607e-8de8-4e29-a868-732bd5be0158'}, 'disk_bus': None, 'guest_format': None, 'attachment_id': 'cea27d58-f793-44b5-9217-207a6bb7e8e3', 'mount_device': '/dev/sdb', 'volume_type': None}], 'swap': None} {{(pid=62508) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1678.308621] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Volume attach. 
Driver type: vmdk {{(pid=62508) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1678.308881] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368760', 'volume_id': '9420607e-8de8-4e29-a868-732bd5be0158', 'name': 'volume-9420607e-8de8-4e29-a868-732bd5be0158', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '145306d7-f0e8-46c0-b2ab-1c41c208f976', 'attached_at': '', 'detached_at': '', 'volume_id': '9420607e-8de8-4e29-a868-732bd5be0158', 'serial': '9420607e-8de8-4e29-a868-732bd5be0158'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1678.309789] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1678.309789] env[62508]: value = "task-1776337" [ 1678.309789] env[62508]: _type = "Task" [ 1678.309789] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.311413] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e043331-085c-4684-8721-32181db7a8d0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.325113] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776337, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.340713] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3473b828-1036-418e-9d3a-d3cf4f4ec8cf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.368424] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] volume-9420607e-8de8-4e29-a868-732bd5be0158/volume-9420607e-8de8-4e29-a868-732bd5be0158.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1678.368770] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9556951-b3c2-40f5-86c6-189d41a377fb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.389799] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1678.389799] env[62508]: value = "task-1776338" [ 1678.389799] env[62508]: _type = "Task" [ 1678.389799] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.400724] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776338, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.494542] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.656s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.495226] env[62508]: DEBUG nova.compute.manager [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1678.501238] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.483s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.501238] env[62508]: DEBUG nova.objects.instance [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Lazy-loading 'resources' on Instance uuid f307d4d5-e877-4d0a-951c-779c1d2e573b {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1678.699063] env[62508]: DEBUG oslo_concurrency.lockutils [None req-02fd26da-4216-4531-88a0-10ffaab893e4 tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "c45b2c35-e58a-4ffa-861a-980747e552a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.696s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.709849] env[62508]: DEBUG oslo_vmware.api [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776335, 'name': PowerOnVM_Task, 'duration_secs': 1.002678} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.710175] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1678.710391] env[62508]: DEBUG nova.compute.manager [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1678.711307] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760f5ca0-fbf9-4c46-99a8-2c85f95c27ce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.802684] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698574be-c0be-43a5-bc6b-e53109b8ac7d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.825673] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Updating instance 'e478855d-e9c7-4abc-8e22-a4b2eb0c7310' progress to 0 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1678.839344] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776337, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.900224] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776338, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.001540] env[62508]: DEBUG nova.compute.utils [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1679.003259] env[62508]: DEBUG nova.compute.manager [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1679.003438] env[62508]: DEBUG nova.network.neutron [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1679.106306] env[62508]: DEBUG nova.policy [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '724b74d525e84062b8adfe88b8c67a14', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '08a2bcaca6e4406a8ccda7b934995f15', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1679.227937] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.266598] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "c45b2c35-e58a-4ffa-861a-980747e552a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.266925] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "c45b2c35-e58a-4ffa-861a-980747e552a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1679.267295] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "c45b2c35-e58a-4ffa-861a-980747e552a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.267570] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "c45b2c35-e58a-4ffa-861a-980747e552a1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1679.268298] env[62508]: DEBUG oslo_concurrency.lockutils [None 
req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "c45b2c35-e58a-4ffa-861a-980747e552a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1679.270632] env[62508]: INFO nova.compute.manager [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Terminating instance [ 1679.274794] env[62508]: DEBUG nova.compute.manager [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1679.278019] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1679.278019] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151a4009-8b8b-4e53-9ba3-630ce1744687 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.291778] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1679.292345] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1f24b19-452f-43bf-a144-2ebccb98ad41 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.300409] env[62508]: DEBUG oslo_vmware.api [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1679.300409] env[62508]: value = "task-1776339" [ 1679.300409] env[62508]: _type = "Task" [ 1679.300409] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.315076] env[62508]: DEBUG oslo_vmware.api [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776339, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.335761] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1679.336115] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776337, 'name': ReconfigVM_Task, 'duration_secs': 0.585722} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.338730] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48809b43-151b-4110-af4a-1185b056ca6b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.340366] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 5b3dd9d0-7f30-45c2-931a-ce7175820710/5b3dd9d0-7f30-45c2-931a-ce7175820710.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1679.341167] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c058173-bd93-4a1c-a7c8-12353d67d5b4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.348790] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1679.348790] env[62508]: value = "task-1776340" [ 1679.348790] env[62508]: _type = "Task" [ 1679.348790] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.350084] env[62508]: DEBUG oslo_vmware.api [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1679.350084] env[62508]: value = "task-1776341" [ 1679.350084] env[62508]: _type = "Task" [ 1679.350084] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.365699] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776340, 'name': Rename_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.368841] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] VM already powered off {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1679.369061] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Updating instance 'e478855d-e9c7-4abc-8e22-a4b2eb0c7310' progress to 17 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1679.374198] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc1c4744-2c56-44d2-8881-32c8c130a64b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.382675] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fecce8f2-2924-44aa-86a6-f08c15f964b8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.423224] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76943e40-d6cf-472b-8b7c-014c94527247 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.436160] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1867fae-2488-4b7a-b2a9-db3389a06255 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.440223] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776338, 'name': ReconfigVM_Task, 'duration_secs': 0.946754} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.441287] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Reconfigured VM instance instance-00000044 to attach disk [datastore1] volume-9420607e-8de8-4e29-a868-732bd5be0158/volume-9420607e-8de8-4e29-a868-732bd5be0158.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1679.446918] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c557a5fe-10f0-4c15-89ab-2d76a4673f7c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.466508] env[62508]: DEBUG nova.compute.provider_tree [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1679.475707] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1679.475707] env[62508]: value = "task-1776342" [ 1679.475707] env[62508]: _type = "Task" [ 1679.475707] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.488472] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776342, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.509537] env[62508]: DEBUG nova.compute.manager [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1679.524515] env[62508]: DEBUG nova.network.neutron [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Successfully created port: aa763265-6f55-4c9b-88e0-e91807170310 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1679.817237] env[62508]: DEBUG oslo_vmware.api [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776339, 'name': PowerOffVM_Task, 'duration_secs': 0.210018} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.817237] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1679.817237] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1679.817237] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f225bd8-fd53-4b7f-9e7e-940a2633ffa5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.861228] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776340, 'name': Rename_Task, 'duration_secs': 0.211818} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.861520] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1679.861810] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-416011ec-b730-4e58-b31f-a421b4a88c4b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.868725] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1679.868725] env[62508]: value = "task-1776344" [ 1679.868725] env[62508]: _type = "Task" [ 1679.868725] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.881538] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1679.882098] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1679.882098] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1679.882344] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1679.882547] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1679.882749] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1679.883021] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1679.883269] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1679.883527] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 
tempest-ServerActionsTestOtherB-1159041240-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1679.883766] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1679.884082] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1679.889604] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776344, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.890289] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5369cf34-1cf8-4dbe-92c8-57cf70fb7dc5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.907222] env[62508]: DEBUG oslo_vmware.api [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1679.907222] env[62508]: value = "task-1776345" [ 1679.907222] env[62508]: _type = "Task" [ 1679.907222] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.917353] env[62508]: DEBUG oslo_vmware.api [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776345, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.969816] env[62508]: DEBUG nova.scheduler.client.report [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1679.989467] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776342, 'name': ReconfigVM_Task, 'duration_secs': 0.414578} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.989467] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368760', 'volume_id': '9420607e-8de8-4e29-a868-732bd5be0158', 'name': 'volume-9420607e-8de8-4e29-a868-732bd5be0158', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '145306d7-f0e8-46c0-b2ab-1c41c208f976', 'attached_at': '', 'detached_at': '', 'volume_id': '9420607e-8de8-4e29-a868-732bd5be0158', 'serial': '9420607e-8de8-4e29-a868-732bd5be0158'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1679.990564] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-26a9d7db-3ea1-4596-b8d2-f96d60b6548f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.999541] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1679.999541] env[62508]: value = "task-1776346" [ 1679.999541] env[62508]: _type = "Task" [ 1679.999541] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.009680] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776346, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.343019] env[62508]: INFO nova.compute.manager [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Rebuilding instance [ 1680.381619] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776344, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.391427] env[62508]: DEBUG nova.compute.manager [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1680.392271] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f74ae14b-c212-48b8-8013-7f8df2e2101a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.417824] env[62508]: DEBUG oslo_vmware.api [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776345, 'name': ReconfigVM_Task, 'duration_secs': 0.207131} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.418207] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Updating instance 'e478855d-e9c7-4abc-8e22-a4b2eb0c7310' progress to 33 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1680.475358] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.976s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.477599] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.718s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.478523] env[62508]: DEBUG nova.objects.instance [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Lazy-loading 'resources' on Instance uuid 6afa4e73-64b4-4b10-b598-433f0c22ecb3 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1680.496136] env[62508]: INFO nova.scheduler.client.report [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Deleted allocations for instance f307d4d5-e877-4d0a-951c-779c1d2e573b [ 1680.510521] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776346, 'name': Rename_Task, 'duration_secs': 0.24615} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.510798] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1680.511094] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f66a8a45-021d-45c1-8fe7-e45bba901cde {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.520579] env[62508]: DEBUG nova.compute.manager [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1680.523996] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1680.523996] env[62508]: value = "task-1776347" [ 1680.523996] env[62508]: _type = "Task" [ 1680.523996] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.534623] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776347, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.551480] env[62508]: DEBUG nova.virt.hardware [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1680.551730] env[62508]: DEBUG nova.virt.hardware [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1680.551887] env[62508]: DEBUG nova.virt.hardware [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1680.552082] env[62508]: DEBUG nova.virt.hardware [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1680.552252] env[62508]: DEBUG nova.virt.hardware [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1680.552376] env[62508]: DEBUG nova.virt.hardware [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1680.552581] env[62508]: DEBUG nova.virt.hardware [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1680.552738] env[62508]: DEBUG nova.virt.hardware [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1680.552903] 
env[62508]: DEBUG nova.virt.hardware [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1680.553663] env[62508]: DEBUG nova.virt.hardware [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1680.553913] env[62508]: DEBUG nova.virt.hardware [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1680.554794] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3d3251-d8b6-44e1-baca-0532d765bf70 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.564199] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c82507-5598-463b-be30-2ba99aea1f23 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.882850] env[62508]: DEBUG oslo_vmware.api [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776344, 'name': PowerOnVM_Task, 'duration_secs': 0.522255} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.883198] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1680.883379] env[62508]: INFO nova.compute.manager [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Took 9.85 seconds to spawn the instance on the hypervisor. 
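The ReconfigVM_Task, Rename_Task, PowerOnVM_Task and PowerOffVM_Task records in this stretch of the log all follow the same oslo.vmware task-polling pattern: a vCenter task is started through the vim service, and the session then polls it ("progress is N%") until it completes or fails. The sketch below is illustrative only and is not taken from this log; `session`, `vm_ref`, and `config_spec` are assumed placeholders for an established oslo_vmware.api.VMwareAPISession, a VM managed-object reference, and a reconfigure spec.

    # Illustrative sketch (assumed names), showing the pattern behind the
    # "Invoking VirtualMachine.ReconfigVM_Task" and "Task: {...} progress is N%"
    # records above. Not code from this deployment.
    def reconfigure_and_wait(session, vm_ref, config_spec):
        # Start the vCenter reconfigure task via the vim service.
        task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=config_spec)
        # wait_for_task blocks while oslo.vmware polls the task object,
        # which is what emits the periodic progress records, and returns
        # (or raises) once vCenter reports the task finished.
        return session.wait_for_task(task)

The same wait loop accounts for the "completed successfully" records with a `duration_secs` field: that duration is the elapsed time between task creation and the poll that observed completion.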
[ 1680.883508] env[62508]: DEBUG nova.compute.manager [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1680.884354] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-460c5cd8-6b32-4531-8e37-8ef19828c899 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.906026] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1680.906026] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dcc96901-d234-4756-ac9c-4ee2e28983db {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.913365] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1680.913365] env[62508]: value = "task-1776348" [ 1680.913365] env[62508]: _type = "Task" [ 1680.913365] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.923345] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776348, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.927518] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1680.927878] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1680.927992] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1680.928138] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1680.928308] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1680.928430] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1680.928625] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1680.928776] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1680.928933] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 
tempest-ServerActionsTestOtherB-1159041240-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1680.930836] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1680.930836] env[62508]: DEBUG nova.virt.hardware [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1680.939651] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Reconfiguring VM instance instance-00000022 to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1680.940668] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36a501a1-7767-430d-bd0a-fe4927c6eb89 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.967293] env[62508]: DEBUG oslo_vmware.api [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1680.967293] env[62508]: value = "task-1776349" [ 1680.967293] env[62508]: _type = "Task" [ 1680.967293] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.977831] env[62508]: DEBUG oslo_vmware.api [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776349, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.011513] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1566d08d-9c7d-4d1e-aa8b-826b3bb42189 tempest-ServersTestBootFromVolume-1626959145 tempest-ServersTestBootFromVolume-1626959145-project-member] Lock "f307d4d5-e877-4d0a-951c-779c1d2e573b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.078s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.039924] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776347, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.369391] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11540002-7c3a-44bb-abee-21cdffe8284b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.380332] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a4ee9a-07fc-4d23-8fc5-82bb3b596681 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.432221] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-975a72f7-6b17-4c42-a19d-6a436696a2d6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.437248] env[62508]: INFO nova.compute.manager [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Took 14.87 seconds to build instance. [ 1681.448175] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5346ef9f-beba-4182-a53c-985b77db3bc9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.453575] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776348, 'name': PowerOffVM_Task, 'duration_secs': 0.258878} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.454825] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1681.455207] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1681.456425] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff671b44-f75e-4739-b1d5-13e69d014488 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.469998] env[62508]: DEBUG nova.compute.provider_tree [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1681.477947] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Unregistering the VM {{(pid=62508) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1681.478608] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-406e836a-28b2-45a8-a268-6c1ca2dcbc51 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.483807] env[62508]: DEBUG oslo_vmware.api [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776349, 'name': ReconfigVM_Task, 'duration_secs': 0.30936} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.484692] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Reconfigured VM instance instance-00000022 to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1681.485989] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94efbfb5-60df-4c4d-8817-19126ae05774 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.511181] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] e478855d-e9c7-4abc-8e22-a4b2eb0c7310/e478855d-e9c7-4abc-8e22-a4b2eb0c7310.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1681.511845] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1dfcfad0-29f8-4ace-9190-3a9cc5ce9098 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.533193] env[62508]: DEBUG oslo_vmware.api [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1681.533193] env[62508]: value = "task-1776351" [ 1681.533193] env[62508]: _type = "Task" [ 1681.533193] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.540398] env[62508]: DEBUG oslo_vmware.api [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776347, 'name': PowerOnVM_Task, 'duration_secs': 0.853405} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.540398] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1681.540398] env[62508]: DEBUG nova.compute.manager [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1681.541170] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c704ce9e-c0a8-4757-a627-a26888ae8a74 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.546705] env[62508]: DEBUG oslo_vmware.api [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776351, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.863625] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "5b3dd9d0-7f30-45c2-931a-ce7175820710" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1681.942021] env[62508]: DEBUG oslo_concurrency.lockutils [None req-295fa960-fd75-4e7e-8c1c-c21fe45a9d54 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "5b3dd9d0-7f30-45c2-931a-ce7175820710" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.385s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.942021] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "5b3dd9d0-7f30-45c2-931a-ce7175820710" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.076s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.942021] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "5b3dd9d0-7f30-45c2-931a-ce7175820710-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1681.942021] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "5b3dd9d0-7f30-45c2-931a-ce7175820710-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.942021] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "5b3dd9d0-7f30-45c2-931a-ce7175820710-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.943284] env[62508]: INFO nova.compute.manager [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Terminating instance [ 1681.945222] env[62508]: DEBUG nova.compute.manager [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1681.945406] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1681.946253] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d96dc406-2301-4eb3-b40f-cc3485648377 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.956272] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1681.956538] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9de6af3e-c355-4acf-b062-2e95b555d7bf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.964535] env[62508]: DEBUG oslo_vmware.api [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1681.964535] env[62508]: value = "task-1776352" [ 1681.964535] env[62508]: _type = "Task" [ 1681.964535] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.972713] env[62508]: DEBUG oslo_vmware.api [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776352, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.978920] env[62508]: DEBUG nova.scheduler.client.report [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1682.045784] env[62508]: DEBUG oslo_vmware.api [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776351, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.061589] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.361680] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1682.361680] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1682.361680] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Deleting the datastore file [datastore1] bee2cc61-b26c-4d2d-a2aa-ec79b8678e32 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1682.361807] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1682.361886] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1682.362525] env[62508]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleting the datastore file [datastore1] c45b2c35-e58a-4ffa-861a-980747e552a1 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1682.362827] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-412f0a3b-6319-4b64-a528-1445ea24e6a6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.364792] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db2607b6-580b-48b3-88a6-e19e72532b95 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.377436] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1682.377436] env[62508]: value = "task-1776353" [ 1682.377436] env[62508]: _type = "Task" [ 1682.377436] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.378919] env[62508]: DEBUG oslo_vmware.api [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for the task: (returnval){ [ 1682.378919] env[62508]: value = "task-1776354" [ 1682.378919] env[62508]: _type = "Task" [ 1682.378919] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.392202] env[62508]: DEBUG oslo_vmware.api [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776354, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.395226] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776353, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.482081] env[62508]: DEBUG oslo_vmware.api [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776352, 'name': PowerOffVM_Task, 'duration_secs': 0.179027} completed successfully. 
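The scheduler report entry earlier in this block lists the provider inventory Nova keeps in sync with Placement: per resource class, a total, a reserved amount, an allocation_ratio, and a max_unit cap on any single allocation. The schedulable capacity Placement derives from such a record is roughly (total - reserved) * allocation_ratio; a reader-side check with the figures from the log (illustrative arithmetic, not Nova or Placement code):

    # Capacity implied by the inventory logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0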
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.482081] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1682.482280] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1682.485520] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aab43fd6-ca4f-4c69-b8d4-65295a890670 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.487233] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.010s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.490627] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 3.263s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.490870] env[62508]: DEBUG nova.objects.instance [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62508) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1682.515450] env[62508]: INFO nova.scheduler.client.report [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Deleted allocations for instance 6afa4e73-64b4-4b10-b598-433f0c22ecb3 [ 1682.550619] env[62508]: DEBUG oslo_vmware.api [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776351, 'name': ReconfigVM_Task, 'duration_secs': 0.66106} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.551515] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Reconfigured VM instance instance-00000022 to attach disk [datastore1] e478855d-e9c7-4abc-8e22-a4b2eb0c7310/e478855d-e9c7-4abc-8e22-a4b2eb0c7310.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1682.552529] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Updating instance 'e478855d-e9c7-4abc-8e22-a4b2eb0c7310' progress to 50 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1682.616515] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1682.616733] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1682.617396] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Deleting the datastore file [datastore1] 5b3dd9d0-7f30-45c2-931a-ce7175820710 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1682.619831] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f0f6af63-86af-4b64-bb28-f6fa7518e0f0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.625570] env[62508]: DEBUG oslo_vmware.api [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1682.625570] env[62508]: value = "task-1776356" [ 1682.625570] env[62508]: _type = "Task" [ 1682.625570] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.635848] env[62508]: DEBUG oslo_vmware.api [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776356, 'name': DeleteDatastoreFile_Task} progress is 0%. 
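Every vCenter-side operation in this log (PowerOffVM_Task, ReconfigVM_Task, DeleteDatastoreFile_Task, CreateVM_Task, ...) follows the same shape: a SOAP call returns a task reference, then oslo.vmware polls it, producing the "Waiting for the task", "progress is N%" and "completed successfully" entries. A hedged sketch of that pattern with oslo.vmware used directly; the host, credentials and vm_ref are placeholders, not values from this deployment:

    from oslo_vmware import api as vmware_api

    def power_off_and_wait(vm_ref):
        # Placeholder endpoint and credentials; real deployments also pass
        # CA bundles, ports, etc.
        session = vmware_api.VMwareAPISession(
            'vc.example.test', 'user', 'secret',
            api_retry_count=10, task_poll_interval=0.5)
        # invoke_api() issues the SOAP request and returns the task reference;
        # wait_for_task() polls it until the task finishes or raises on error.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        return session.wait_for_task(task)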
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.865443] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "9bafe786-5815-4871-8405-558cac7b3654" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.865825] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "9bafe786-5815-4871-8405-558cac7b3654" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.901019] env[62508]: DEBUG oslo_vmware.api [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Task: {'id': task-1776354, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167701} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.902071] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1682.903701] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1682.904081] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1682.904400] env[62508]: INFO nova.compute.manager [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Took 3.63 seconds to destroy the instance on the hypervisor. [ 1682.904785] env[62508]: DEBUG oslo.service.loopingcall [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1682.905174] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776353, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190605} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.909481] env[62508]: DEBUG nova.compute.manager [-] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1682.909481] env[62508]: DEBUG nova.network.neutron [-] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1682.914987] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1682.914987] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1682.915160] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1682.917731] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "2a564423-a2fd-4873-885f-37777bdd83eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.918234] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "2a564423-a2fd-4873-885f-37777bdd83eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.026500] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fcb89cbc-d347-4c3b-8806-9541b28e7dcd tempest-AttachInterfacesUnderV243Test-1308958664 tempest-AttachInterfacesUnderV243Test-1308958664-project-member] Lock "6afa4e73-64b4-4b10-b598-433f0c22ecb3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.355s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.065148] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b4895fb0-417f-4006-b876-746f617ddf8d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.092023] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3fc899-589f-4f6e-b931-c96ddf053ad0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.111467] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Updating instance 'e478855d-e9c7-4abc-8e22-a4b2eb0c7310' progress to 67 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1683.136875] env[62508]: DEBUG oslo_vmware.api [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776356, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.465357} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.137395] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1683.137818] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1683.138202] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1683.138510] env[62508]: INFO nova.compute.manager [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1683.139127] env[62508]: DEBUG oslo.service.loopingcall [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1683.139429] env[62508]: DEBUG nova.compute.manager [-] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1683.139622] env[62508]: DEBUG nova.network.neutron [-] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1683.345568] env[62508]: DEBUG nova.compute.manager [req-1a089d3e-a860-4b20-b963-9c9dd3d84613 req-6589de8e-a616-4498-87f0-0bee5f5dd299 service nova] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Received event network-vif-deleted-115a9f0a-d45f-4776-82fb-ff4d9b1c9fc9 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1683.345813] env[62508]: INFO nova.compute.manager [req-1a089d3e-a860-4b20-b963-9c9dd3d84613 req-6589de8e-a616-4498-87f0-0bee5f5dd299 service nova] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Neutron deleted interface 115a9f0a-d45f-4776-82fb-ff4d9b1c9fc9; detaching it from the instance and deleting it from the info cache [ 1683.345888] env[62508]: DEBUG nova.network.neutron [req-1a089d3e-a860-4b20-b963-9c9dd3d84613 req-6589de8e-a616-4498-87f0-0bee5f5dd299 service nova] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1683.368890] env[62508]: DEBUG nova.compute.manager [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1683.420832] env[62508]: DEBUG nova.compute.manager [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1683.509357] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0e1ce68d-b9a5-4701-b2b9-32346a8670b2 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.509357] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.447s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.509357] env[62508]: DEBUG nova.objects.instance [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62508) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1683.595761] env[62508]: DEBUG nova.compute.manager [req-64850bfe-7799-42bf-b857-378da8783262 req-789f8f42-355f-4d1a-a33c-c4bada81e175 service nova] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Received event network-vif-deleted-b7c00805-5cf9-431e-95ea-ec950fedaa89 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1683.595992] env[62508]: INFO nova.compute.manager [req-64850bfe-7799-42bf-b857-378da8783262 req-789f8f42-355f-4d1a-a33c-c4bada81e175 service nova] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Neutron deleted interface b7c00805-5cf9-431e-95ea-ec950fedaa89; detaching it from the instance and deleting it from the info cache [ 1683.597243] env[62508]: DEBUG nova.network.neutron [req-64850bfe-7799-42bf-b857-378da8783262 req-789f8f42-355f-4d1a-a33c-c4bada81e175 service nova] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1683.662451] env[62508]: DEBUG nova.network.neutron [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Port ec6adf65-fbdf-4276-8e19-eb416336bbff binding to destination host cpu-1 is already ACTIVE {{(pid=62508) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1683.680469] env[62508]: DEBUG nova.network.neutron [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Successfully updated port: aa763265-6f55-4c9b-88e0-e91807170310 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1683.681111] env[62508]: WARNING oslo_messaging._drivers.amqpdriver [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. 
Increasing threshold to: 20 [ 1683.687960] env[62508]: DEBUG oslo_concurrency.lockutils [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.688461] env[62508]: DEBUG oslo_concurrency.lockutils [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.808036] env[62508]: DEBUG nova.network.neutron [-] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1683.852666] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fea76e79-393d-4ce6-91e2-dcccb0cbfae5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.862235] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be435f00-f251-42d9-a71c-68e6bf3949de {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.908113] env[62508]: DEBUG nova.compute.manager [req-1a089d3e-a860-4b20-b963-9c9dd3d84613 req-6589de8e-a616-4498-87f0-0bee5f5dd299 service nova] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Detach interface failed, port_id=115a9f0a-d45f-4776-82fb-ff4d9b1c9fc9, reason: Instance c45b2c35-e58a-4ffa-861a-980747e552a1 could not be found. 
{{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1683.909587] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.943916] env[62508]: DEBUG nova.network.neutron [-] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1683.954843] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.967065] env[62508]: DEBUG nova.virt.hardware [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1683.967345] env[62508]: DEBUG nova.virt.hardware [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1683.967538] env[62508]: DEBUG nova.virt.hardware [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1683.967803] env[62508]: DEBUG nova.virt.hardware [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1683.967964] env[62508]: DEBUG nova.virt.hardware [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1683.968129] env[62508]: DEBUG nova.virt.hardware [None 
req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1683.968343] env[62508]: DEBUG nova.virt.hardware [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1683.968504] env[62508]: DEBUG nova.virt.hardware [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1683.968670] env[62508]: DEBUG nova.virt.hardware [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1683.968834] env[62508]: DEBUG nova.virt.hardware [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1683.969079] env[62508]: DEBUG nova.virt.hardware [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1683.969923] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56cc4ec6-0633-490c-9f96-a2a9d00d7966 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.979272] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b1dbbb-8fa7-4396-874f-ab5f8a32bb17 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.993929] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:82:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8570ede3-d3fc-41d9-90a0-3dc1ef777446', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1684.002973] env[62508]: DEBUG oslo.service.loopingcall [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
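The nova.virt.hardware entries above walk through CPU topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits (0:0:0), the limits default to 65536 sockets/cores/threads and the only possible topology is 1:1:1. A deliberately simplified, hypothetical enumeration that reproduces the "Possible topologies" result for this case; it is not the actual Nova algorithm:

    # Hypothetical simplification: list (sockets, cores, threads) splits of a
    # vCPU count within the given limits, as the log reports them for vcpus=1.
    def candidate_topologies(vcpus, max_sockets=65536, max_cores=65536,
                             max_threads=65536):
        return [(s, c, t)
                for s in range(1, min(vcpus, max_sockets) + 1)
                for c in range(1, min(vcpus, max_cores) + 1)
                for t in range(1, min(vcpus, max_threads) + 1)
                if s * c * t == vcpus]

    print(candidate_topologies(1))   # [(1, 1, 1)], matching the log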
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1684.003324] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1684.003585] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de8bd477-b194-40d4-8d69-0d4a77b440c9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.031137] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1684.031137] env[62508]: value = "task-1776357" [ 1684.031137] env[62508]: _type = "Task" [ 1684.031137] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.043495] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776357, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.099925] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c7e4b7df-dfb0-4d68-a517-fc45c192db9f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.111923] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f514fda-5193-467d-8f57-0f6b091e4730 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.155551] env[62508]: DEBUG nova.compute.manager [req-64850bfe-7799-42bf-b857-378da8783262 req-789f8f42-355f-4d1a-a33c-c4bada81e175 service nova] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Detach interface failed, port_id=b7c00805-5cf9-431e-95ea-ec950fedaa89, reason: Instance 5b3dd9d0-7f30-45c2-931a-ce7175820710 could not be found. 
{{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1684.186701] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquiring lock "refresh_cache-73841c12-1ae9-46a5-bfe0-e0f82877667c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.186951] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquired lock "refresh_cache-73841c12-1ae9-46a5-bfe0-e0f82877667c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.187073] env[62508]: DEBUG nova.network.neutron [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1684.192088] env[62508]: DEBUG nova.compute.utils [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1684.311502] env[62508]: INFO nova.compute.manager [-] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Took 1.40 seconds to deallocate network for instance. [ 1684.447269] env[62508]: INFO nova.compute.manager [-] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Took 1.31 seconds to deallocate network for instance. [ 1684.526865] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f2688940-4c06-420d-bddf-0a251ce4248f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.527366] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.617s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.528852] env[62508]: INFO nova.compute.claims [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1684.547244] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776357, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.691551] env[62508]: DEBUG oslo_concurrency.lockutils [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1684.692101] env[62508]: DEBUG oslo_concurrency.lockutils [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.692555] env[62508]: DEBUG oslo_concurrency.lockutils [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.700571] env[62508]: DEBUG oslo_concurrency.lockutils [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.770505] env[62508]: DEBUG nova.network.neutron [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1684.820060] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1684.954021] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.051416] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776357, 'name': CreateVM_Task, 'duration_secs': 0.573547} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.052292] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1685.052972] env[62508]: DEBUG oslo_concurrency.lockutils [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1685.053156] env[62508]: DEBUG oslo_concurrency.lockutils [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1685.053476] env[62508]: DEBUG oslo_concurrency.lockutils [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1685.054084] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5d6aec5-614d-4cca-8316-0ba886ca1e8c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.063121] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1685.063121] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527ed404-132a-bebc-d6d3-b4040aeb9978" [ 1685.063121] env[62508]: _type = "Task" [ 1685.063121] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.070414] env[62508]: DEBUG nova.network.neutron [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Updating instance_info_cache with network_info: [{"id": "aa763265-6f55-4c9b-88e0-e91807170310", "address": "fa:16:3e:0f:f1:29", "network": {"id": "f7d53ec2-0e84-4e27-87d1-6f2a48716aa5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-801595391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08a2bcaca6e4406a8ccda7b934995f15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa763265-6f", "ovs_interfaceid": "aa763265-6f55-4c9b-88e0-e91807170310", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1685.077266] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527ed404-132a-bebc-d6d3-b4040aeb9978, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.380225] env[62508]: DEBUG nova.compute.manager [req-62462f1c-6ad6-416a-a907-51448b5a0acb req-eb7d9349-3921-464e-aa1d-7aea9374cb63 service nova] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Received event network-vif-plugged-aa763265-6f55-4c9b-88e0-e91807170310 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1685.380225] env[62508]: DEBUG oslo_concurrency.lockutils [req-62462f1c-6ad6-416a-a907-51448b5a0acb req-eb7d9349-3921-464e-aa1d-7aea9374cb63 service nova] Acquiring lock "73841c12-1ae9-46a5-bfe0-e0f82877667c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.380225] env[62508]: DEBUG oslo_concurrency.lockutils [req-62462f1c-6ad6-416a-a907-51448b5a0acb req-eb7d9349-3921-464e-aa1d-7aea9374cb63 service nova] Lock "73841c12-1ae9-46a5-bfe0-e0f82877667c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1685.380859] env[62508]: DEBUG oslo_concurrency.lockutils [req-62462f1c-6ad6-416a-a907-51448b5a0acb req-eb7d9349-3921-464e-aa1d-7aea9374cb63 service nova] Lock "73841c12-1ae9-46a5-bfe0-e0f82877667c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.381241] env[62508]: DEBUG nova.compute.manager [req-62462f1c-6ad6-416a-a907-51448b5a0acb req-eb7d9349-3921-464e-aa1d-7aea9374cb63 service nova] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] No waiting events found dispatching network-vif-plugged-aa763265-6f55-4c9b-88e0-e91807170310 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1685.381573] env[62508]: WARNING nova.compute.manager [req-62462f1c-6ad6-416a-a907-51448b5a0acb req-eb7d9349-3921-464e-aa1d-7aea9374cb63 service nova] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Received unexpected event network-vif-plugged-aa763265-6f55-4c9b-88e0-e91807170310 for instance with vm_state building and task_state spawning. [ 1685.382143] env[62508]: DEBUG nova.compute.manager [req-62462f1c-6ad6-416a-a907-51448b5a0acb req-eb7d9349-3921-464e-aa1d-7aea9374cb63 service nova] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Received event network-changed-aa763265-6f55-4c9b-88e0-e91807170310 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1685.382647] env[62508]: DEBUG nova.compute.manager [req-62462f1c-6ad6-416a-a907-51448b5a0acb req-eb7d9349-3921-464e-aa1d-7aea9374cb63 service nova] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Refreshing instance network info cache due to event network-changed-aa763265-6f55-4c9b-88e0-e91807170310. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1685.383229] env[62508]: DEBUG oslo_concurrency.lockutils [req-62462f1c-6ad6-416a-a907-51448b5a0acb req-eb7d9349-3921-464e-aa1d-7aea9374cb63 service nova] Acquiring lock "refresh_cache-73841c12-1ae9-46a5-bfe0-e0f82877667c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1685.579333] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Releasing lock "refresh_cache-73841c12-1ae9-46a5-bfe0-e0f82877667c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1685.579904] env[62508]: DEBUG nova.compute.manager [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Instance network_info: |[{"id": "aa763265-6f55-4c9b-88e0-e91807170310", "address": "fa:16:3e:0f:f1:29", "network": {"id": "f7d53ec2-0e84-4e27-87d1-6f2a48716aa5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-801595391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08a2bcaca6e4406a8ccda7b934995f15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa763265-6f", "ovs_interfaceid": "aa763265-6f55-4c9b-88e0-e91807170310", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1685.582020] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527ed404-132a-bebc-d6d3-b4040aeb9978, 'name': SearchDatastore_Task, 'duration_secs': 0.01169} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.583474] env[62508]: DEBUG oslo_concurrency.lockutils [req-62462f1c-6ad6-416a-a907-51448b5a0acb req-eb7d9349-3921-464e-aa1d-7aea9374cb63 service nova] Acquired lock "refresh_cache-73841c12-1ae9-46a5-bfe0-e0f82877667c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1685.583818] env[62508]: DEBUG nova.network.neutron [req-62462f1c-6ad6-416a-a907-51448b5a0acb req-eb7d9349-3921-464e-aa1d-7aea9374cb63 service nova] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Refreshing network info cache for port aa763265-6f55-4c9b-88e0-e91807170310 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1685.585356] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:f1:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3952eb02-1162-48ed-8227-9c138960d583', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa763265-6f55-4c9b-88e0-e91807170310', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1685.595317] env[62508]: DEBUG oslo.service.loopingcall [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1685.595317] env[62508]: DEBUG oslo_concurrency.lockutils [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1685.595317] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1685.595317] env[62508]: DEBUG oslo_concurrency.lockutils [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1685.595317] env[62508]: DEBUG oslo_concurrency.lockutils [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1685.596743] env[62508]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1685.597942] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1685.598277] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-202c8640-6b71-47cc-a56b-5e9b7c78ff0f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.601481] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac91f897-f937-4f66-8ea1-2e1dfabc2d58 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.628366] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1685.628366] env[62508]: value = "task-1776358" [ 1685.628366] env[62508]: _type = "Task" [ 1685.628366] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.629698] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1685.629913] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1685.635879] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25784b0f-abaf-409e-8cb9-4755ec4c483e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.648837] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776358, 'name': CreateVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.651085] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1685.651085] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52bfadab-d90d-e6d5-8fdf-019e168b65fc" [ 1685.651085] env[62508]: _type = "Task" [ 1685.651085] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.665597] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52bfadab-d90d-e6d5-8fdf-019e168b65fc, 'name': SearchDatastore_Task, 'duration_secs': 0.010651} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.666457] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-267e3142-b1c1-4a5c-aa50-9c7204bdad5e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.677364] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1685.677364] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5293294c-b7be-58cd-3cbb-f954724454ef" [ 1685.677364] env[62508]: _type = "Task" [ 1685.677364] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.687979] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5293294c-b7be-58cd-3cbb-f954724454ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.770121] env[62508]: DEBUG oslo_concurrency.lockutils [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1685.771463] env[62508]: DEBUG oslo_concurrency.lockutils [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1685.771463] env[62508]: DEBUG nova.network.neutron [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1685.787360] env[62508]: DEBUG oslo_concurrency.lockutils [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.787604] env[62508]: DEBUG oslo_concurrency.lockutils [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1685.788567] env[62508]: INFO nova.compute.manager [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 
tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Attaching volume 14a689d7-0dd2-4b7e-b497-37fc35863218 to /dev/sdb [ 1685.859355] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d76b7b-02d8-4228-a519-0c1678a66799 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.866591] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe725b0-c065-495b-a836-cfc1c61d5879 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.888383] env[62508]: DEBUG nova.virt.block_device [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Updating existing volume attachment record: 4e9a0145-3670-40d1-9982-1ba6a005defd {{(pid=62508) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1685.969495] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a36ccbbf-770c-4727-a984-3aea5c8f4a2e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.983147] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34313b93-1c9f-4c58-baa7-e086137b93b0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.016373] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1869e99e-2904-4def-9b06-34fcfa07ca78 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.025718] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-077d6f9a-49cb-44ee-bff4-5bce20b8f752 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.040847] env[62508]: DEBUG nova.compute.provider_tree [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1686.141257] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776358, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.195931] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5293294c-b7be-58cd-3cbb-f954724454ef, 'name': SearchDatastore_Task, 'duration_secs': 0.013284} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.197356] env[62508]: DEBUG oslo_concurrency.lockutils [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1686.197356] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] bee2cc61-b26c-4d2d-a2aa-ec79b8678e32/bee2cc61-b26c-4d2d-a2aa-ec79b8678e32.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1686.197356] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5530d262-214e-4689-85d2-3919ed840236 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.205715] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1686.205715] env[62508]: value = "task-1776360" [ 1686.205715] env[62508]: _type = "Task" [ 1686.205715] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.215173] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776360, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.458914] env[62508]: DEBUG nova.network.neutron [req-62462f1c-6ad6-416a-a907-51448b5a0acb req-eb7d9349-3921-464e-aa1d-7aea9374cb63 service nova] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Updated VIF entry in instance network info cache for port aa763265-6f55-4c9b-88e0-e91807170310. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1686.459423] env[62508]: DEBUG nova.network.neutron [req-62462f1c-6ad6-416a-a907-51448b5a0acb req-eb7d9349-3921-464e-aa1d-7aea9374cb63 service nova] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Updating instance_info_cache with network_info: [{"id": "aa763265-6f55-4c9b-88e0-e91807170310", "address": "fa:16:3e:0f:f1:29", "network": {"id": "f7d53ec2-0e84-4e27-87d1-6f2a48716aa5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-801595391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08a2bcaca6e4406a8ccda7b934995f15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa763265-6f", "ovs_interfaceid": "aa763265-6f55-4c9b-88e0-e91807170310", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1686.566673] env[62508]: ERROR nova.scheduler.client.report [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [req-7a839d3d-2f0e-4ba2-8622-2aae4275ac42] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7a839d3d-2f0e-4ba2-8622-2aae4275ac42"}]} [ 1686.597328] env[62508]: DEBUG nova.scheduler.client.report [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1686.616445] env[62508]: DEBUG nova.scheduler.client.report [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1686.616924] env[62508]: DEBUG nova.compute.provider_tree [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1686.637994] env[62508]: DEBUG nova.scheduler.client.report [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1686.655216] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776358, 'name': CreateVM_Task, 'duration_secs': 0.676567} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.655415] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1686.656117] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1686.656317] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1686.656650] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1686.656918] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c3db161-527b-4526-a546-fc85d42fdfa3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.663239] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1686.663239] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c428c3-dcc3-6c46-8e53-e8d244622a4f" [ 1686.663239] env[62508]: _type = "Task" [ 1686.663239] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.667468] env[62508]: DEBUG nova.scheduler.client.report [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1686.681074] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c428c3-dcc3-6c46-8e53-e8d244622a4f, 'name': SearchDatastore_Task, 'duration_secs': 0.010931} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.681602] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1686.681978] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1686.682376] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1686.682658] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1686.682968] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1686.683610] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-78561024-24df-458f-b6a4-4412134c00cf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.697675] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1686.698050] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1686.699745] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-227e7f25-6ebf-41ef-9333-55e0687ae833 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.716680] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1686.716680] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f6bc5b-6fcc-ac6e-e0e3-addfbd7849fd" [ 1686.716680] env[62508]: _type = "Task" [ 1686.716680] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.729884] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776360, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471153} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.731042] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] bee2cc61-b26c-4d2d-a2aa-ec79b8678e32/bee2cc61-b26c-4d2d-a2aa-ec79b8678e32.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1686.731318] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1686.731691] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f1e75af-b573-423a-a42e-45efac76b1fc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.739247] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f6bc5b-6fcc-ac6e-e0e3-addfbd7849fd, 'name': SearchDatastore_Task, 'duration_secs': 0.009563} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.744350] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e86121b-cfd9-4518-80bc-8f6de7efeee6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.749482] env[62508]: DEBUG nova.network.neutron [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Updating instance_info_cache with network_info: [{"id": "ec6adf65-fbdf-4276-8e19-eb416336bbff", "address": "fa:16:3e:82:3c:01", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec6adf65-fb", "ovs_interfaceid": "ec6adf65-fbdf-4276-8e19-eb416336bbff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1686.752742] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1686.752742] env[62508]: value = "task-1776363" [ 1686.752742] env[62508]: _type = "Task" [ 1686.752742] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.760030] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1686.760030] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b7cc7d-4f5c-d59f-aadb-37e3daf428ce" [ 1686.760030] env[62508]: _type = "Task" [ 1686.760030] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.772446] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776363, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.779612] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b7cc7d-4f5c-d59f-aadb-37e3daf428ce, 'name': SearchDatastore_Task, 'duration_secs': 0.012308} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.779612] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1686.779867] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 73841c12-1ae9-46a5-bfe0-e0f82877667c/73841c12-1ae9-46a5-bfe0-e0f82877667c.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1686.780455] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-469c9c27-5d01-459c-b8e6-ec5c99ce2637 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.790713] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1686.790713] env[62508]: value = "task-1776364" [ 1686.790713] env[62508]: _type = "Task" [ 1686.790713] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.801368] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776364, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.962420] env[62508]: DEBUG oslo_concurrency.lockutils [req-62462f1c-6ad6-416a-a907-51448b5a0acb req-eb7d9349-3921-464e-aa1d-7aea9374cb63 service nova] Releasing lock "refresh_cache-73841c12-1ae9-46a5-bfe0-e0f82877667c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.043826] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f09c9c6-aaf3-45a0-a614-13e9870eb048 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.054574] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-814f543c-c6bf-49db-b391-df0fe9f2b5f3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.094902] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15bb385-dc1c-42ee-bc0f-8fb8e918e71f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.105269] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89c11a0-d30a-4d02-9205-a97156089b82 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.125842] env[62508]: DEBUG nova.compute.provider_tree [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1687.260274] env[62508]: DEBUG oslo_concurrency.lockutils [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.267309] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776363, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064819} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.270024] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1687.270024] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d4c1d6-4b06-4a15-8549-6e6e8e713009 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.291906] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] bee2cc61-b26c-4d2d-a2aa-ec79b8678e32/bee2cc61-b26c-4d2d-a2aa-ec79b8678e32.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1687.292316] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e3f477f-f33f-4ae0-90ae-d5ea3219a6d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.320153] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776364, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.447402} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.321934] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 73841c12-1ae9-46a5-bfe0-e0f82877667c/73841c12-1ae9-46a5-bfe0-e0f82877667c.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1687.321934] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1687.322146] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1687.322146] env[62508]: value = "task-1776365" [ 1687.322146] env[62508]: _type = "Task" [ 1687.322146] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.323281] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-19cd84be-90fb-4655-9374-d6d26c3a1a49 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.337369] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776365, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.337688] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1687.337688] env[62508]: value = "task-1776366" [ 1687.337688] env[62508]: _type = "Task" [ 1687.337688] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.347237] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776366, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.667765] env[62508]: DEBUG nova.scheduler.client.report [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 129 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1687.667765] env[62508]: DEBUG nova.compute.provider_tree [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 129 to 130 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1687.667765] env[62508]: DEBUG nova.compute.provider_tree [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
1687.787629] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8e0ffe-828b-48ec-8790-f2aafcac488d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.807514] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0011e2d4-364a-41a2-80b9-759caf8e55b3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.815592] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Updating instance 'e478855d-e9c7-4abc-8e22-a4b2eb0c7310' progress to 83 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1687.835584] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776365, 'name': ReconfigVM_Task, 'duration_secs': 0.301172} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.835859] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Reconfigured VM instance instance-00000049 to attach disk [datastore1] bee2cc61-b26c-4d2d-a2aa-ec79b8678e32/bee2cc61-b26c-4d2d-a2aa-ec79b8678e32.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1687.836489] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9765ddd1-b30c-430f-9830-9162075a7ea5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.846372] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776366, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.849654] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1687.849654] env[62508]: value = "task-1776367" [ 1687.849654] env[62508]: _type = "Task" [ 1687.849654] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.857101] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776367, 'name': Rename_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.172234] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.645s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1688.172770] env[62508]: DEBUG nova.compute.manager [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1688.175574] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.222s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.176985] env[62508]: INFO nova.compute.claims [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1688.325192] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-297c66f3-8ddb-441d-a97b-90d141603db5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Updating instance 'e478855d-e9c7-4abc-8e22-a4b2eb0c7310' progress to 100 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1688.352638] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776366, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.939057} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.354920] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1688.357486] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f65a6bcd-a2c2-424d-ae1b-a6f3b620a995 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.378567] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776367, 'name': Rename_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.391282] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 73841c12-1ae9-46a5-bfe0-e0f82877667c/73841c12-1ae9-46a5-bfe0-e0f82877667c.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1688.391282] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b586f33-55e2-40cb-b14e-f176f94ffd19 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.415026] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1688.415026] env[62508]: value = "task-1776369" [ 1688.415026] env[62508]: _type = "Task" [ 1688.415026] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.426096] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776369, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.684347] env[62508]: DEBUG nova.compute.utils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1688.686775] env[62508]: DEBUG nova.compute.manager [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1688.687165] env[62508]: DEBUG nova.network.neutron [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1688.747530] env[62508]: DEBUG nova.policy [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f5d94bc165341d1ab686cadece5f7f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ecc6c09064734ca381a22d894304cd80', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1688.862985] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776367, 'name': Rename_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.926012] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776369, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.999643] env[62508]: DEBUG nova.network.neutron [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Successfully created port: 23e0a38a-6ba2-45bf-8535-495296b681ae {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1689.002617] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Acquiring lock "806102ec-7622-4770-91c9-8c5723893dec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1689.003120] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Lock "806102ec-7622-4770-91c9-8c5723893dec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.190853] env[62508]: DEBUG nova.compute.manager [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1689.368276] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776367, 'name': Rename_Task, 'duration_secs': 1.160109} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.368276] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1689.368276] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee639fb0-9558-4bae-b0e9-464d87aa4233 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.378288] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1689.378288] env[62508]: value = "task-1776370" [ 1689.378288] env[62508]: _type = "Task" [ 1689.378288] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.385394] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776370, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.426504] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776369, 'name': ReconfigVM_Task, 'duration_secs': 0.745332} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.429458] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 73841c12-1ae9-46a5-bfe0-e0f82877667c/73841c12-1ae9-46a5-bfe0-e0f82877667c.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1689.430669] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be1d428f-82f4-45e1-8f36-daed9dd756ef {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.439368] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1689.439368] env[62508]: value = "task-1776371" [ 1689.439368] env[62508]: _type = "Task" [ 1689.439368] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.450883] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776371, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.506470] env[62508]: DEBUG nova.compute.manager [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1689.571807] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b588ee4-f4e2-4943-a2a0-6b218d860ccf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.581420] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96f98d6c-bed5-4388-a674-47d8922e8689 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.615403] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda5c2c3-eabc-423a-b50c-bbcf44625410 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.624543] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3095d8ab-93b3-4d4a-a5f6-9b66d2dfb246 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.639024] env[62508]: DEBUG nova.compute.provider_tree [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1689.885868] env[62508]: DEBUG oslo_vmware.api [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776370, 'name': PowerOnVM_Task, 'duration_secs': 0.49733} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.886430] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1689.886758] env[62508]: DEBUG nova.compute.manager [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1689.887704] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d856a6c-ad8f-44c2-b487-a285d82089cd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.950179] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776371, 'name': Rename_Task, 'duration_secs': 0.181017} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.950468] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1689.950725] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-abb69672-a7ee-4921-82a7-84a14fbe9a55 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.959517] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1689.959517] env[62508]: value = "task-1776372" [ 1689.959517] env[62508]: _type = "Task" [ 1689.959517] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.967590] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776372, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.027925] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.141902] env[62508]: DEBUG nova.scheduler.client.report [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1690.147353] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.147635] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1690.147824] env[62508]: DEBUG nova.compute.manager [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Going to confirm migration 5 {{(pid=62508) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1690.203589] env[62508]: DEBUG nova.compute.manager [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1690.229965] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1690.230240] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1690.230401] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1690.230632] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1690.230795] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1690.230939] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1690.231158] env[62508]: DEBUG 
nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1690.231377] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1690.231561] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1690.231730] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1690.231904] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1690.233170] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46d7b9f-0f75-4040-9911-bfba38ce2e8e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.243969] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159c9445-fec4-444e-b94e-8ad711e4495a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.407909] env[62508]: DEBUG oslo_concurrency.lockutils [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.453763] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Volume attach. 
Driver type: vmdk {{(pid=62508) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1690.453924] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368777', 'volume_id': '14a689d7-0dd2-4b7e-b497-37fc35863218', 'name': 'volume-14a689d7-0dd2-4b7e-b497-37fc35863218', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a', 'attached_at': '', 'detached_at': '', 'volume_id': '14a689d7-0dd2-4b7e-b497-37fc35863218', 'serial': '14a689d7-0dd2-4b7e-b497-37fc35863218'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1690.455165] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd7abdb-9da2-462c-adbc-1494cf9cb965 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.477650] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760bce2f-a511-40bb-9b03-c23677aebfa1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.483295] env[62508]: DEBUG oslo_vmware.api [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776372, 'name': PowerOnVM_Task, 'duration_secs': 0.500991} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.484069] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1690.484282] env[62508]: INFO nova.compute.manager [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Took 9.97 seconds to spawn the instance on the hypervisor. 
[ 1690.484441] env[62508]: DEBUG nova.compute.manager [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1690.485231] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-befcb18c-ca7e-45da-97d6-4d11bd64970a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.509879] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] volume-14a689d7-0dd2-4b7e-b497-37fc35863218/volume-14a689d7-0dd2-4b7e-b497-37fc35863218.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1690.510738] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b0b21e7-bec4-487a-bb4a-6ecc4e8e9e75 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.527570] env[62508]: DEBUG nova.compute.manager [req-6445571e-89a5-4e75-a690-aed3c89ed8d7 req-4037cb3e-9bba-4439-aa40-94bd42dcb1cb service nova] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Received event network-vif-plugged-23e0a38a-6ba2-45bf-8535-495296b681ae {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1690.527809] env[62508]: DEBUG oslo_concurrency.lockutils [req-6445571e-89a5-4e75-a690-aed3c89ed8d7 req-4037cb3e-9bba-4439-aa40-94bd42dcb1cb service nova] Acquiring lock "9bafe786-5815-4871-8405-558cac7b3654-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.527985] env[62508]: DEBUG oslo_concurrency.lockutils [req-6445571e-89a5-4e75-a690-aed3c89ed8d7 req-4037cb3e-9bba-4439-aa40-94bd42dcb1cb service nova] Lock "9bafe786-5815-4871-8405-558cac7b3654-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1690.528190] env[62508]: DEBUG oslo_concurrency.lockutils [req-6445571e-89a5-4e75-a690-aed3c89ed8d7 req-4037cb3e-9bba-4439-aa40-94bd42dcb1cb service nova] Lock "9bafe786-5815-4871-8405-558cac7b3654-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.528476] env[62508]: DEBUG nova.compute.manager [req-6445571e-89a5-4e75-a690-aed3c89ed8d7 req-4037cb3e-9bba-4439-aa40-94bd42dcb1cb service nova] [instance: 9bafe786-5815-4871-8405-558cac7b3654] No waiting events found dispatching network-vif-plugged-23e0a38a-6ba2-45bf-8535-495296b681ae {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1690.528559] env[62508]: WARNING nova.compute.manager [req-6445571e-89a5-4e75-a690-aed3c89ed8d7 req-4037cb3e-9bba-4439-aa40-94bd42dcb1cb service nova] [instance: 
9bafe786-5815-4871-8405-558cac7b3654] Received unexpected event network-vif-plugged-23e0a38a-6ba2-45bf-8535-495296b681ae for instance with vm_state building and task_state spawning. [ 1690.539085] env[62508]: DEBUG oslo_vmware.api [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1690.539085] env[62508]: value = "task-1776373" [ 1690.539085] env[62508]: _type = "Task" [ 1690.539085] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.551824] env[62508]: DEBUG oslo_vmware.api [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776373, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.647084] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.471s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.647947] env[62508]: DEBUG nova.compute.manager [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1690.652449] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.832s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1690.652616] env[62508]: DEBUG nova.objects.instance [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lazy-loading 'resources' on Instance uuid c45b2c35-e58a-4ffa-861a-980747e552a1 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1690.658701] env[62508]: DEBUG nova.network.neutron [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Successfully updated port: 23e0a38a-6ba2-45bf-8535-495296b681ae {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1690.709737] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1690.709930] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d 
tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1690.710346] env[62508]: DEBUG nova.network.neutron [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1690.710463] env[62508]: DEBUG nova.objects.instance [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lazy-loading 'info_cache' on Instance uuid e478855d-e9c7-4abc-8e22-a4b2eb0c7310 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1690.993380] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1690.993687] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1690.994731] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1690.994731] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Rebuilding the list of instances to heal {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1691.044250] env[62508]: INFO nova.compute.manager [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Took 19.89 seconds to build instance. [ 1691.053471] env[62508]: DEBUG oslo_vmware.api [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776373, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.154249] env[62508]: DEBUG nova.compute.utils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1691.155901] env[62508]: DEBUG nova.compute.manager [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1691.156099] env[62508]: DEBUG nova.network.neutron [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1691.166203] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "refresh_cache-9bafe786-5815-4871-8405-558cac7b3654" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1691.166320] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquired lock "refresh_cache-9bafe786-5815-4871-8405-558cac7b3654" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1691.166524] env[62508]: DEBUG nova.network.neutron [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1691.211654] env[62508]: DEBUG nova.policy [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f5d94bc165341d1ab686cadece5f7f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ecc6c09064734ca381a22d894304cd80', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1691.439162] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "e875f30e-2c25-46a4-8c74-36f08e7eb982" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.439485] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "e875f30e-2c25-46a4-8c74-36f08e7eb982" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.440036] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "e875f30e-2c25-46a4-8c74-36f08e7eb982-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.440036] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "e875f30e-2c25-46a4-8c74-36f08e7eb982-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.440202] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "e875f30e-2c25-46a4-8c74-36f08e7eb982-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.442564] env[62508]: INFO nova.compute.manager [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Terminating instance [ 1691.444321] env[62508]: DEBUG nova.compute.manager [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1691.444542] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1691.445563] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d40b609f-117c-4da3-b8f7-5d41db20cc52 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.456858] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1691.457167] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-783b5ca9-62dd-45b4-8aaa-45650b44ad39 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.467836] env[62508]: DEBUG oslo_vmware.api [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1691.467836] env[62508]: value = "task-1776374" [ 1691.467836] env[62508]: _type = "Task" [ 1691.467836] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.477462] env[62508]: DEBUG oslo_vmware.api [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776374, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.503641] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Skipping network cache update for instance because it is being deleted. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9946}} [ 1691.503808] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Skipping network cache update for instance because it is Building. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1691.503982] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Skipping network cache update for instance because it is Building. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1691.532998] env[62508]: DEBUG nova.network.neutron [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Successfully created port: b772290b-95f4-40f5-ae90-2e483b44a33f {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1691.538751] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "refresh_cache-de69dbf0-86f1-4b05-a9db-8b9afaabe49c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1691.538988] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquired lock "refresh_cache-de69dbf0-86f1-4b05-a9db-8b9afaabe49c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1691.539153] env[62508]: DEBUG nova.network.neutron [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Forcefully refreshing network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1691.539261] env[62508]: DEBUG nova.objects.instance [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lazy-loading 'info_cache' on Instance uuid de69dbf0-86f1-4b05-a9db-8b9afaabe49c {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1691.546432] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f09ceca7-8000-42c3-b1b0-1ef579073a2a tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "73841c12-1ae9-46a5-bfe0-e0f82877667c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.401s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.552702] env[62508]: DEBUG oslo_vmware.api [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 
tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776373, 'name': ReconfigVM_Task, 'duration_secs': 0.77487} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.552702] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Reconfigured VM instance instance-00000038 to attach disk [datastore1] volume-14a689d7-0dd2-4b7e-b497-37fc35863218/volume-14a689d7-0dd2-4b7e-b497-37fc35863218.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1691.558943] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3682e90a-daa2-4e28-9df3-22f710f1139b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.570471] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24aedc4a-aeff-45a0-a13f-ac96006aa5ef {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.580736] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-779cbf9b-5ee6-49f9-99e4-0f94314de73d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.585449] env[62508]: DEBUG oslo_vmware.api [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1691.585449] env[62508]: value = "task-1776375" [ 1691.585449] env[62508]: _type = "Task" [ 1691.585449] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.619768] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ed3f04-cc78-4417-a676-1ca1c318ae47 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.625943] env[62508]: DEBUG oslo_vmware.api [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776375, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.633659] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5068afe9-cdc2-4d36-8b84-a467df85cdaa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.658114] env[62508]: DEBUG nova.compute.provider_tree [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1691.659876] env[62508]: DEBUG nova.compute.manager [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1691.703104] env[62508]: DEBUG nova.network.neutron [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1691.978887] env[62508]: DEBUG oslo_vmware.api [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776374, 'name': PowerOffVM_Task, 'duration_secs': 0.282024} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.979172] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1691.979351] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1691.979601] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-66a8c62a-4a63-49e4-ae7f-621aecd680d5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.994549] env[62508]: DEBUG nova.network.neutron [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Updating instance_info_cache with network_info: [{"id": "23e0a38a-6ba2-45bf-8535-495296b681ae", "address": "fa:16:3e:b2:3a:7d", "network": {"id": "c194e816-3ebb-4cf6-852c-09521019b177", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1154243421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecc6c09064734ca381a22d894304cd80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23e0a38a-6b", "ovs_interfaceid": "23e0a38a-6ba2-45bf-8535-495296b681ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1692.077146] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1692.077473] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1692.077665] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 
tempest-ServersAdminTestJSON-1341054529-project-member] Deleting the datastore file [datastore1] e875f30e-2c25-46a4-8c74-36f08e7eb982 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1692.080490] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c949263-eedc-47b3-802f-8f78db8d9e1c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.091851] env[62508]: DEBUG oslo_vmware.api [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1692.091851] env[62508]: value = "task-1776377" [ 1692.091851] env[62508]: _type = "Task" [ 1692.091851] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.098790] env[62508]: DEBUG oslo_vmware.api [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776375, 'name': ReconfigVM_Task, 'duration_secs': 0.20169} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.099762] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368777', 'volume_id': '14a689d7-0dd2-4b7e-b497-37fc35863218', 'name': 'volume-14a689d7-0dd2-4b7e-b497-37fc35863218', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a', 'attached_at': '', 'detached_at': '', 'volume_id': '14a689d7-0dd2-4b7e-b497-37fc35863218', 'serial': '14a689d7-0dd2-4b7e-b497-37fc35863218'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1692.103792] env[62508]: DEBUG oslo_vmware.api [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776377, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.120435] env[62508]: DEBUG nova.network.neutron [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Updating instance_info_cache with network_info: [{"id": "ec6adf65-fbdf-4276-8e19-eb416336bbff", "address": "fa:16:3e:82:3c:01", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec6adf65-fb", "ovs_interfaceid": "ec6adf65-fbdf-4276-8e19-eb416336bbff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1692.186222] env[62508]: ERROR nova.scheduler.client.report [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] [req-9dc69c61-c7f5-4d7f-8469-2cdc4584e417] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9dc69c61-c7f5-4d7f-8469-2cdc4584e417"}]} [ 1692.205685] env[62508]: DEBUG nova.scheduler.client.report [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1692.220751] env[62508]: DEBUG nova.scheduler.client.report [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1692.220973] env[62508]: DEBUG nova.compute.provider_tree [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1692.232967] env[62508]: DEBUG nova.scheduler.client.report [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1692.253957] env[62508]: DEBUG nova.scheduler.client.report [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1692.500072] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Releasing lock "refresh_cache-9bafe786-5815-4871-8405-558cac7b3654" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1692.500072] env[62508]: DEBUG 
nova.compute.manager [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Instance network_info: |[{"id": "23e0a38a-6ba2-45bf-8535-495296b681ae", "address": "fa:16:3e:b2:3a:7d", "network": {"id": "c194e816-3ebb-4cf6-852c-09521019b177", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1154243421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecc6c09064734ca381a22d894304cd80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23e0a38a-6b", "ovs_interfaceid": "23e0a38a-6ba2-45bf-8535-495296b681ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1692.500509] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:3a:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'be8bd197-4b2b-46e7-88ea-2554b0438584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '23e0a38a-6ba2-45bf-8535-495296b681ae', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1692.508665] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Creating folder: Project (ecc6c09064734ca381a22d894304cd80). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1692.508942] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f10cbc71-8ec6-4269-b140-7932b84bac69 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.526854] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Created folder: Project (ecc6c09064734ca381a22d894304cd80) in parent group-v368536. [ 1692.527204] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Creating folder: Instances. Parent ref: group-v368778. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1692.527380] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c28708b-4d9a-43f2-a267-0bdff070d5f7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.541569] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Created folder: Instances in parent group-v368778. [ 1692.542181] env[62508]: DEBUG oslo.service.loopingcall [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1692.542181] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1692.542455] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0bb605b7-9aab-4d20-9e9c-22a41b82faa1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.566772] env[62508]: DEBUG nova.compute.manager [req-ec0b98c5-4890-4691-ab3a-bcc1d6a763d4 req-706a48f0-59f6-456b-bd02-013304ba6f04 service nova] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Received event network-changed-23e0a38a-6ba2-45bf-8535-495296b681ae {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1692.566999] env[62508]: DEBUG nova.compute.manager [req-ec0b98c5-4890-4691-ab3a-bcc1d6a763d4 req-706a48f0-59f6-456b-bd02-013304ba6f04 service nova] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Refreshing instance network info cache due to event network-changed-23e0a38a-6ba2-45bf-8535-495296b681ae. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1692.567268] env[62508]: DEBUG oslo_concurrency.lockutils [req-ec0b98c5-4890-4691-ab3a-bcc1d6a763d4 req-706a48f0-59f6-456b-bd02-013304ba6f04 service nova] Acquiring lock "refresh_cache-9bafe786-5815-4871-8405-558cac7b3654" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1692.567452] env[62508]: DEBUG oslo_concurrency.lockutils [req-ec0b98c5-4890-4691-ab3a-bcc1d6a763d4 req-706a48f0-59f6-456b-bd02-013304ba6f04 service nova] Acquired lock "refresh_cache-9bafe786-5815-4871-8405-558cac7b3654" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1692.567652] env[62508]: DEBUG nova.network.neutron [req-ec0b98c5-4890-4691-ab3a-bcc1d6a763d4 req-706a48f0-59f6-456b-bd02-013304ba6f04 service nova] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Refreshing network info cache for port 23e0a38a-6ba2-45bf-8535-495296b681ae {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1692.576566] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1692.576566] env[62508]: value = "task-1776380" [ 1692.576566] env[62508]: _type = "Task" [ 1692.576566] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.585724] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781a505a-9c7d-4d91-bc7c-9f6c1d69054d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.601791] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9847732-13f7-42c7-a0dd-c6b8ac186d11 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.605707] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776380, 'name': CreateVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.617818] env[62508]: DEBUG oslo_vmware.api [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776377, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192498} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.648590] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1692.648828] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1692.649081] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1692.649269] env[62508]: INFO nova.compute.manager [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1692.649523] env[62508]: DEBUG oslo.service.loopingcall [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1692.649993] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1692.650240] env[62508]: DEBUG nova.objects.instance [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lazy-loading 'migration_context' on Instance uuid e478855d-e9c7-4abc-8e22-a4b2eb0c7310 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1692.654850] env[62508]: DEBUG nova.compute.manager [-] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1692.655079] env[62508]: DEBUG nova.network.neutron [-] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1692.658052] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22563dac-566d-4325-a78a-d156b3498674 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.668177] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9469d7ef-03e8-47e4-9020-cb6c774651bb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.674023] env[62508]: DEBUG nova.compute.manager [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1692.690766] env[62508]: DEBUG nova.compute.provider_tree [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1692.711638] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1692.711882] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1692.712052] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1692.712242] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1692.712389] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1692.712535] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1692.712743] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1692.712907] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1692.713120] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1692.713261] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1692.713420] env[62508]: DEBUG nova.virt.hardware [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1692.714389] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a62cb3-7440-4ee0-ad5c-ff36c94455bd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.727123] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc5e91c7-090e-4b41-afc3-8df6e9dbfdaa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.947955] env[62508]: DEBUG nova.compute.manager [req-747ab405-1ba2-4b1f-9219-b246e7b60bd6 req-3f42df83-846c-49d9-95f0-c164f9ebc693 service nova] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Received event network-vif-deleted-12fd1aa1-4663-49bc-8123-396a0fedaeb8 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1692.948311] env[62508]: INFO nova.compute.manager [req-747ab405-1ba2-4b1f-9219-b246e7b60bd6 req-3f42df83-846c-49d9-95f0-c164f9ebc693 service nova] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Neutron deleted interface 12fd1aa1-4663-49bc-8123-396a0fedaeb8; detaching it from the instance and deleting it from the info cache [ 1692.948311] env[62508]: DEBUG nova.network.neutron [req-747ab405-1ba2-4b1f-9219-b246e7b60bd6 req-3f42df83-846c-49d9-95f0-c164f9ebc693 service nova] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1693.075120] env[62508]: DEBUG nova.network.neutron [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Successfully updated port: b772290b-95f4-40f5-ae90-2e483b44a33f {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1693.091622] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776380, 'name': CreateVM_Task, 'duration_secs': 0.421855} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.092039] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1693.092838] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1693.093161] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1693.093667] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1693.094325] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6c3cfb4-1d92-4afe-a287-3b839572d0f1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.102475] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1693.102475] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ceda4a-53ed-9f8c-c99b-64a4cad82353" [ 1693.102475] env[62508]: _type = "Task" [ 1693.102475] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.114761] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ceda4a-53ed-9f8c-c99b-64a4cad82353, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.153555] env[62508]: DEBUG nova.objects.base [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1693.154573] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1574f3d-3971-4c0d-ade1-fbecedaaf24c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.175666] env[62508]: DEBUG nova.objects.instance [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lazy-loading 'flavor' on Instance uuid aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1693.180496] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7c588b3-ba2f-4491-9383-f3a80f241b51 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.190011] env[62508]: DEBUG oslo_vmware.api [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1693.190011] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52014f4b-ff3e-3d2b-eb65-33cbbf8c2641" [ 1693.190011] env[62508]: _type = "Task" [ 1693.190011] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.195067] env[62508]: DEBUG nova.scheduler.client.report [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1693.205479] env[62508]: DEBUG oslo_vmware.api [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52014f4b-ff3e-3d2b-eb65-33cbbf8c2641, 'name': SearchDatastore_Task, 'duration_secs': 0.008133} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.206079] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1693.383900] env[62508]: DEBUG nova.network.neutron [req-ec0b98c5-4890-4691-ab3a-bcc1d6a763d4 req-706a48f0-59f6-456b-bd02-013304ba6f04 service nova] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Updated VIF entry in instance network info cache for port 23e0a38a-6ba2-45bf-8535-495296b681ae. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1693.384059] env[62508]: DEBUG nova.network.neutron [req-ec0b98c5-4890-4691-ab3a-bcc1d6a763d4 req-706a48f0-59f6-456b-bd02-013304ba6f04 service nova] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Updating instance_info_cache with network_info: [{"id": "23e0a38a-6ba2-45bf-8535-495296b681ae", "address": "fa:16:3e:b2:3a:7d", "network": {"id": "c194e816-3ebb-4cf6-852c-09521019b177", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1154243421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecc6c09064734ca381a22d894304cd80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23e0a38a-6b", "ovs_interfaceid": "23e0a38a-6ba2-45bf-8535-495296b681ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1693.389144] env[62508]: DEBUG nova.network.neutron [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Updating instance_info_cache with network_info: [{"id": "d8028a3e-f50d-41fa-b065-a2babc831eec", "address": "fa:16:3e:d0:df:77", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", 
"external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8028a3e-f5", "ovs_interfaceid": "d8028a3e-f50d-41fa-b065-a2babc831eec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1693.426325] env[62508]: DEBUG nova.network.neutron [-] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1693.451910] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a8b4062f-6bfb-4f6d-9ddc-e77cf5058101 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.463301] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537a1438-625a-447e-86dd-cd38d99518a6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.508011] env[62508]: DEBUG nova.compute.manager [req-747ab405-1ba2-4b1f-9219-b246e7b60bd6 req-3f42df83-846c-49d9-95f0-c164f9ebc693 service nova] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Detach interface failed, port_id=12fd1aa1-4663-49bc-8123-396a0fedaeb8, reason: Instance e875f30e-2c25-46a4-8c74-36f08e7eb982 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1693.584027] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "refresh_cache-2a564423-a2fd-4873-885f-37777bdd83eb" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1693.584246] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquired lock "refresh_cache-2a564423-a2fd-4873-885f-37777bdd83eb" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1693.584455] env[62508]: DEBUG nova.network.neutron [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1693.614458] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ceda4a-53ed-9f8c-c99b-64a4cad82353, 'name': SearchDatastore_Task, 'duration_secs': 0.012453} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.614767] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1693.615016] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1693.615291] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1693.615466] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1693.615652] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1693.615917] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee814722-205f-4ceb-9b50-89e6cde1731c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.625505] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1693.625692] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1693.626759] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40c373fc-2fa7-493e-909b-4ba534193fec {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.632923] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1693.632923] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528a842a-0959-9f28-7410-d76a74fbbe8b" [ 1693.632923] env[62508]: _type = "Task" [ 1693.632923] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.642031] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528a842a-0959-9f28-7410-d76a74fbbe8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.685039] env[62508]: DEBUG oslo_concurrency.lockutils [None req-76032e2f-0393-4666-a3b6-ae414aeadbc4 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" "released" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: held 7.897s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.703415] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.049s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.706424] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.752s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.706424] env[62508]: DEBUG nova.objects.instance [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lazy-loading 'resources' on Instance uuid 5b3dd9d0-7f30-45c2-931a-ce7175820710 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1693.740156] env[62508]: INFO nova.scheduler.client.report [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Deleted allocations for instance c45b2c35-e58a-4ffa-861a-980747e552a1 [ 1693.887427] env[62508]: DEBUG oslo_concurrency.lockutils [req-ec0b98c5-4890-4691-ab3a-bcc1d6a763d4 req-706a48f0-59f6-456b-bd02-013304ba6f04 service nova] Releasing lock "refresh_cache-9bafe786-5815-4871-8405-558cac7b3654" {{(pid=62508) lock
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1693.891110] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Releasing lock "refresh_cache-de69dbf0-86f1-4b05-a9db-8b9afaabe49c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1693.891297] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Updated the network info_cache for instance {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1693.891494] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1693.891658] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1693.891810] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1693.891983] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1693.929745] env[62508]: INFO nova.compute.manager [-] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Took 1.27 seconds to deallocate network for instance. 
[ 1693.980716] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquiring lock "73841c12-1ae9-46a5-bfe0-e0f82877667c" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1693.981103] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "73841c12-1ae9-46a5-bfe0-e0f82877667c" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.981250] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquiring lock "73841c12-1ae9-46a5-bfe0-e0f82877667c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1693.981525] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "73841c12-1ae9-46a5-bfe0-e0f82877667c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.981855] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "73841c12-1ae9-46a5-bfe0-e0f82877667c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.983968] env[62508]: INFO nova.compute.manager [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Terminating instance [ 1693.985731] env[62508]: DEBUG nova.compute.manager [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Start destroying the instance on the hypervisor.
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1693.985926] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1693.987610] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-165e53ee-9af7-4709-b335-e7613e4f876a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.998361] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1693.998709] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-120f1d8b-866a-48d0-b7a6-66cd5de0ca83 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.006173] env[62508]: DEBUG oslo_vmware.api [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1694.006173] env[62508]: value = "task-1776381" [ 1694.006173] env[62508]: _type = "Task" [ 1694.006173] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.016276] env[62508]: DEBUG oslo_vmware.api [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776381, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.120760] env[62508]: DEBUG nova.network.neutron [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1694.151114] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528a842a-0959-9f28-7410-d76a74fbbe8b, 'name': SearchDatastore_Task, 'duration_secs': 0.011518} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.152423] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4868e78-70d0-4119-8d3c-804a171af343 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.161816] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1694.161816] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52306aa4-964f-8c8e-0018-58efcb11b2fe" [ 1694.161816] env[62508]: _type = "Task" [ 1694.161816] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.179453] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52306aa4-964f-8c8e-0018-58efcb11b2fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.249109] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b8020c46-5a41-497a-b060-23dc673bc32b tempest-ServerDiskConfigTestJSON-1662059138 tempest-ServerDiskConfigTestJSON-1662059138-project-member] Lock "c45b2c35-e58a-4ffa-861a-980747e552a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 14.982s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1694.292344] env[62508]: DEBUG nova.network.neutron [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Updating instance_info_cache with network_info: [{"id": "b772290b-95f4-40f5-ae90-2e483b44a33f", "address": "fa:16:3e:65:7d:f0", "network": {"id": "c194e816-3ebb-4cf6-852c-09521019b177", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1154243421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecc6c09064734ca381a22d894304cd80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb772290b-95", "ovs_interfaceid": "b772290b-95f4-40f5-ae90-2e483b44a33f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1694.340335] env[62508]: INFO nova.compute.manager [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 
tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Rescuing [ 1694.340795] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1694.340980] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquired lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1694.341178] env[62508]: DEBUG nova.network.neutron [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1694.399315] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Acquiring lock "a0245a18-638d-4c32-bea2-456408b5e001" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1694.399912] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Lock "a0245a18-638d-4c32-bea2-456408b5e001" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1694.399912] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Acquiring lock "a0245a18-638d-4c32-bea2-456408b5e001-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1694.400254] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Lock "a0245a18-638d-4c32-bea2-456408b5e001-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1694.400478] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Lock "a0245a18-638d-4c32-bea2-456408b5e001-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1694.405641] env[62508]: INFO nova.compute.manager [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Terminating instance [ 1694.407563] env[62508]: DEBUG nova.compute.manager [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1694.407763] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1694.408681] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f730608b-6905-4f1c-ab87-d0dc3336d8eb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.418247] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1694.420949] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ddc1197e-b91b-465f-b974-58f9802046cb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.429987] env[62508]: DEBUG oslo_vmware.api [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Waiting for the task: (returnval){ [ 1694.429987] env[62508]: value = "task-1776382" [ 1694.429987] env[62508]: _type = "Task" [ 1694.429987] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.437325] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1694.444367] env[62508]: DEBUG oslo_vmware.api [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': task-1776382, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.517181] env[62508]: DEBUG oslo_vmware.api [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776381, 'name': PowerOffVM_Task, 'duration_secs': 0.423738} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.519732] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1694.519912] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1694.520355] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9925761-96d6-4871-8e8b-0b04b819826c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.578476] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d902f27-3939-421a-b712-92bd442d68ac {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.583640] env[62508]: DEBUG nova.compute.manager [req-703c28f8-dee1-4c52-8137-a7c7a0c0095a req-21061910-c5e4-420d-998a-30635d7b1b67 service nova] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Received event network-vif-plugged-b772290b-95f4-40f5-ae90-2e483b44a33f {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1694.583919] env[62508]: DEBUG oslo_concurrency.lockutils [req-703c28f8-dee1-4c52-8137-a7c7a0c0095a req-21061910-c5e4-420d-998a-30635d7b1b67 service nova] Acquiring lock "2a564423-a2fd-4873-885f-37777bdd83eb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1694.584540] env[62508]: DEBUG oslo_concurrency.lockutils [req-703c28f8-dee1-4c52-8137-a7c7a0c0095a req-21061910-c5e4-420d-998a-30635d7b1b67 service nova] Lock "2a564423-a2fd-4873-885f-37777bdd83eb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1694.584735] env[62508]: DEBUG oslo_concurrency.lockutils [req-703c28f8-dee1-4c52-8137-a7c7a0c0095a req-21061910-c5e4-420d-998a-30635d7b1b67 service nova] Lock "2a564423-a2fd-4873-885f-37777bdd83eb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1694.584904] env[62508]: DEBUG nova.compute.manager [req-703c28f8-dee1-4c52-8137-a7c7a0c0095a req-21061910-c5e4-420d-998a-30635d7b1b67 service nova] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] No waiting events found dispatching network-vif-plugged-b772290b-95f4-40f5-ae90-2e483b44a33f {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1694.585085] env[62508]: WARNING nova.compute.manager [req-703c28f8-dee1-4c52-8137-a7c7a0c0095a req-21061910-c5e4-420d-998a-30635d7b1b67 service nova] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Received unexpected event 
network-vif-plugged-b772290b-95f4-40f5-ae90-2e483b44a33f for instance with vm_state building and task_state spawning. [ 1694.585338] env[62508]: DEBUG nova.compute.manager [req-703c28f8-dee1-4c52-8137-a7c7a0c0095a req-21061910-c5e4-420d-998a-30635d7b1b67 service nova] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Received event network-changed-b772290b-95f4-40f5-ae90-2e483b44a33f {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1694.585515] env[62508]: DEBUG nova.compute.manager [req-703c28f8-dee1-4c52-8137-a7c7a0c0095a req-21061910-c5e4-420d-998a-30635d7b1b67 service nova] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Refreshing instance network info cache due to event network-changed-b772290b-95f4-40f5-ae90-2e483b44a33f. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1694.585714] env[62508]: DEBUG oslo_concurrency.lockutils [req-703c28f8-dee1-4c52-8137-a7c7a0c0095a req-21061910-c5e4-420d-998a-30635d7b1b67 service nova] Acquiring lock "refresh_cache-2a564423-a2fd-4873-885f-37777bdd83eb" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1694.592024] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff5a350e-50d0-4f89-8663-33603d5ce881 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.630765] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f56cee6-d09d-49ea-924d-74fdd01c9176 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.633494] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1694.633694] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1694.633866] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Deleting the datastore file [datastore1] 73841c12-1ae9-46a5-bfe0-e0f82877667c {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1694.634114] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d9c1fd8-a7e6-4764-be50-114d918b8475 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.642727] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527cb6ea-d16f-4dfd-a3d3-b2d7dc6622e0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.647980] env[62508]: DEBUG oslo_vmware.api [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 
tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1694.647980] env[62508]: value = "task-1776384" [ 1694.647980] env[62508]: _type = "Task" [ 1694.647980] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.661231] env[62508]: DEBUG nova.compute.provider_tree [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1694.669720] env[62508]: DEBUG oslo_vmware.api [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776384, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.677209] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52306aa4-964f-8c8e-0018-58efcb11b2fe, 'name': SearchDatastore_Task, 'duration_secs': 0.015218} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.677514] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1694.677809] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 9bafe786-5815-4871-8405-558cac7b3654/9bafe786-5815-4871-8405-558cac7b3654.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1694.678131] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-25765564-5251-4f81-b48f-646a31931f1c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.687711] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1694.687711] env[62508]: value = "task-1776385" [ 1694.687711] env[62508]: _type = "Task" [ 1694.687711] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.698142] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776385, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.796455] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Releasing lock "refresh_cache-2a564423-a2fd-4873-885f-37777bdd83eb" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1694.796823] env[62508]: DEBUG nova.compute.manager [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Instance network_info: |[{"id": "b772290b-95f4-40f5-ae90-2e483b44a33f", "address": "fa:16:3e:65:7d:f0", "network": {"id": "c194e816-3ebb-4cf6-852c-09521019b177", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1154243421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecc6c09064734ca381a22d894304cd80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb772290b-95", "ovs_interfaceid": "b772290b-95f4-40f5-ae90-2e483b44a33f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1694.797318] env[62508]: DEBUG oslo_concurrency.lockutils [req-703c28f8-dee1-4c52-8137-a7c7a0c0095a req-21061910-c5e4-420d-998a-30635d7b1b67 service nova] Acquired lock "refresh_cache-2a564423-a2fd-4873-885f-37777bdd83eb" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1694.797623] env[62508]: DEBUG nova.network.neutron [req-703c28f8-dee1-4c52-8137-a7c7a0c0095a req-21061910-c5e4-420d-998a-30635d7b1b67 service nova] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Refreshing network info cache for port b772290b-95f4-40f5-ae90-2e483b44a33f {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1694.800432] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:7d:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'be8bd197-4b2b-46e7-88ea-2554b0438584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b772290b-95f4-40f5-ae90-2e483b44a33f', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1694.808869] env[62508]: DEBUG oslo.service.loopingcall [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 
tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1694.812187] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1694.812840] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27da9b13-e4cb-456d-9699-7c6df3609119 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.835129] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1694.835129] env[62508]: value = "task-1776386" [ 1694.835129] env[62508]: _type = "Task" [ 1694.835129] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.847427] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776386, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.950146] env[62508]: DEBUG oslo_vmware.api [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': task-1776382, 'name': PowerOffVM_Task, 'duration_secs': 0.383137} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.950866] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1694.951154] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1694.951707] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-289b1816-4fc1-4555-86a7-29ef8caec2ce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.128387] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1695.128860] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1695.129228] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 
tempest-ServersTestManualDisk-316539111-project-member] Deleting the datastore file [datastore1] a0245a18-638d-4c32-bea2-456408b5e001 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1695.129948] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-41048777-86a4-4360-a950-e2da3b271613 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.139349] env[62508]: DEBUG oslo_vmware.api [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Waiting for the task: (returnval){ [ 1695.139349] env[62508]: value = "task-1776388" [ 1695.139349] env[62508]: _type = "Task" [ 1695.139349] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.150239] env[62508]: DEBUG oslo_vmware.api [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': task-1776388, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.166650] env[62508]: DEBUG nova.scheduler.client.report [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1695.170377] env[62508]: DEBUG oslo_vmware.api [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776384, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165188} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.174025] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1695.174025] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1695.174025] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1695.174025] env[62508]: INFO nova.compute.manager [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1695.174025] env[62508]: DEBUG oslo.service.loopingcall [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1695.174025] env[62508]: DEBUG nova.compute.manager [-] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1695.174025] env[62508]: DEBUG nova.network.neutron [-] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1695.179981] env[62508]: DEBUG nova.network.neutron [req-703c28f8-dee1-4c52-8137-a7c7a0c0095a req-21061910-c5e4-420d-998a-30635d7b1b67 service nova] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Updated VIF entry in instance network info cache for port b772290b-95f4-40f5-ae90-2e483b44a33f. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1695.180478] env[62508]: DEBUG nova.network.neutron [req-703c28f8-dee1-4c52-8137-a7c7a0c0095a req-21061910-c5e4-420d-998a-30635d7b1b67 service nova] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Updating instance_info_cache with network_info: [{"id": "b772290b-95f4-40f5-ae90-2e483b44a33f", "address": "fa:16:3e:65:7d:f0", "network": {"id": "c194e816-3ebb-4cf6-852c-09521019b177", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1154243421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecc6c09064734ca381a22d894304cd80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb772290b-95", "ovs_interfaceid": "b772290b-95f4-40f5-ae90-2e483b44a33f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1695.198111] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776385, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.300226] env[62508]: DEBUG nova.network.neutron [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Updating instance_info_cache with network_info: [{"id": "458dc468-1ae9-4f09-b0e2-4c866362fb80", "address": "fa:16:3e:f7:69:71", "network": {"id": "1469693f-972e-4bc4-8302-f159fa7e79b8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1766475744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27e6f55b56be40d2a619f0119aefb2ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap458dc468-1a", "ovs_interfaceid": "458dc468-1ae9-4f09-b0e2-4c866362fb80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1695.347679] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776386, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.366400] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "24091abb-f71f-4528-8fc5-b97725cf079e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1695.366400] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "24091abb-f71f-4528-8fc5-b97725cf079e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.651601] env[62508]: DEBUG oslo_vmware.api [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Task: {'id': task-1776388, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.346147} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.651904] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1695.652072] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1695.652260] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1695.652440] env[62508]: INFO nova.compute.manager [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1695.652683] env[62508]: DEBUG oslo.service.loopingcall [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1695.652878] env[62508]: DEBUG nova.compute.manager [-] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1695.652975] env[62508]: DEBUG nova.network.neutron [-] [instance: a0245a18-638d-4c32-bea2-456408b5e001] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1695.677017] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.971s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.680103] env[62508]: DEBUG nova.compute.manager [req-206b088e-20df-41a0-ad0c-87dc40a2095c req-71a3cd1e-1777-439d-aae0-fe4dc103522e service nova] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Received event network-vif-deleted-aa763265-6f55-4c9b-88e0-e91807170310 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1695.680461] env[62508]: INFO nova.compute.manager [req-206b088e-20df-41a0-ad0c-87dc40a2095c req-71a3cd1e-1777-439d-aae0-fe4dc103522e service nova] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Neutron deleted interface aa763265-6f55-4c9b-88e0-e91807170310; detaching it from the instance and deleting it from the info cache [ 1695.680569] env[62508]: DEBUG nova.network.neutron [req-206b088e-20df-41a0-ad0c-87dc40a2095c 
req-71a3cd1e-1777-439d-aae0-fe4dc103522e service nova] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1695.681890] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.654s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.683800] env[62508]: INFO nova.compute.claims [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1695.687540] env[62508]: DEBUG oslo_concurrency.lockutils [req-703c28f8-dee1-4c52-8137-a7c7a0c0095a req-21061910-c5e4-420d-998a-30635d7b1b67 service nova] Releasing lock "refresh_cache-2a564423-a2fd-4873-885f-37777bdd83eb" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1695.702881] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776385, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529895} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.703196] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 9bafe786-5815-4871-8405-558cac7b3654/9bafe786-5815-4871-8405-558cac7b3654.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1695.703427] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1695.703715] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-59b37841-cf7b-48a4-a242-28cfffa8e959 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.709696] env[62508]: INFO nova.scheduler.client.report [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Deleted allocations for instance 5b3dd9d0-7f30-45c2-931a-ce7175820710 [ 1695.715369] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1695.715369] env[62508]: value = "task-1776389" [ 1695.715369] env[62508]: _type = "Task" [ 1695.715369] env[62508]: } to 
complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.726749] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776389, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.802810] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Releasing lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1695.851387] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776386, 'name': CreateVM_Task, 'duration_secs': 0.876861} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.851670] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1695.852572] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1695.852858] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1695.853344] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1695.854071] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d0d6f39-3ab9-455e-8afd-074535b26046 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.862271] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1695.862271] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d3a3d0-a609-e43a-2a2f-b06eeb4a679b" [ 1695.862271] env[62508]: _type = "Task" [ 1695.862271] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.870418] env[62508]: DEBUG nova.compute.utils [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1695.881356] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d3a3d0-a609-e43a-2a2f-b06eeb4a679b, 'name': SearchDatastore_Task, 'duration_secs': 0.012759} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.882589] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1695.883283] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1695.883362] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1695.883570] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1695.883879] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1695.884660] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd1cc119-da04-4d78-9b34-8366fb686b3c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.897078] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1695.897430] 
env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1695.898598] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0926d02-71f3-4c2d-b5c5-1a88a455e5f9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.906427] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1695.906427] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d082aa-e377-9960-db49-c44c4fbb4686" [ 1695.906427] env[62508]: _type = "Task" [ 1695.906427] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.916263] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d082aa-e377-9960-db49-c44c4fbb4686, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.961686] env[62508]: DEBUG nova.network.neutron [-] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1696.190457] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-83536358-2c38-4e92-8cfe-cd684a7f15c8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.204270] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11b21b1-9c2f-4c77-a7da-e1b512e4a7da {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.223258] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f7328d76-cd7d-4168-a84d-92d9887722fa tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "5b3dd9d0-7f30-45c2-931a-ce7175820710" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.284s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.232113] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776389, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.129331} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.232388] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1696.233533] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb80e3b-b7ec-4799-bfa5-198bc63ba8a3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.254647] env[62508]: DEBUG nova.compute.manager [req-206b088e-20df-41a0-ad0c-87dc40a2095c req-71a3cd1e-1777-439d-aae0-fe4dc103522e service nova] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Detach interface failed, port_id=aa763265-6f55-4c9b-88e0-e91807170310, reason: Instance 73841c12-1ae9-46a5-bfe0-e0f82877667c could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1696.279394] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 9bafe786-5815-4871-8405-558cac7b3654/9bafe786-5815-4871-8405-558cac7b3654.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1696.280185] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba710c37-fe0d-490f-b1a5-7b2c68fe7272 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.311422] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1696.311422] env[62508]: value = "task-1776390" [ 1696.311422] env[62508]: _type = "Task" [ 1696.311422] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.323558] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776390, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.345965] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1696.346429] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b0b4859-d595-4db5-b44b-d47179b39877 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.355766] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1696.355766] env[62508]: value = "task-1776391" [ 1696.355766] env[62508]: _type = "Task" [ 1696.355766] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.370220] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776391, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.374827] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "24091abb-f71f-4528-8fc5-b97725cf079e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.421031] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d082aa-e377-9960-db49-c44c4fbb4686, 'name': SearchDatastore_Task, 'duration_secs': 0.012091} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.421031] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fdf96d4-110b-47f5-866a-616138614b19 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.427141] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1696.427141] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525c57eb-c210-79d3-9c66-4dfaeb929735" [ 1696.427141] env[62508]: _type = "Task" [ 1696.427141] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.437861] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525c57eb-c210-79d3-9c66-4dfaeb929735, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.464654] env[62508]: INFO nova.compute.manager [-] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Took 1.29 seconds to deallocate network for instance. [ 1696.629975] env[62508]: DEBUG nova.network.neutron [-] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1696.822948] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776390, 'name': ReconfigVM_Task, 'duration_secs': 0.373898} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.825827] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 9bafe786-5815-4871-8405-558cac7b3654/9bafe786-5815-4871-8405-558cac7b3654.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1696.826650] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6a7d3a88-6a14-4b48-aef6-9a56c0fdf7e9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.837068] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1696.837068] env[62508]: value = "task-1776392" [ 1696.837068] env[62508]: _type = "Task" [ 1696.837068] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.855459] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776392, 'name': Rename_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.866624] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776391, 'name': PowerOffVM_Task, 'duration_secs': 0.251964} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.867161] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1696.867816] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d77c15b-011e-4554-8dc1-969da43df8ca {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.893773] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6aa3d03-97c9-4424-b42b-e6855c202907 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.940019] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525c57eb-c210-79d3-9c66-4dfaeb929735, 'name': SearchDatastore_Task, 'duration_secs': 0.016093} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.942212] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1696.942565] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1696.942838] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 2a564423-a2fd-4873-885f-37777bdd83eb/2a564423-a2fd-4873-885f-37777bdd83eb.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1696.943096] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f0cd29cc-137e-4de0-996f-484e373b9776 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.944788] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d8cdc6f0-d1fc-4d5e-a4f7-26f31d322bbe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.958856] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 
tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1696.958856] env[62508]: value = "task-1776394" [ 1696.958856] env[62508]: _type = "Task" [ 1696.958856] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.958856] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1696.958856] env[62508]: value = "task-1776393" [ 1696.958856] env[62508]: _type = "Task" [ 1696.958856] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.973236] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.982593] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776394, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.987608] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] VM already powered off {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1696.987857] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1696.988680] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1696.988680] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1696.988680] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1696.988965] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-873b66ca-1aed-4094-bd8c-5615f9466181 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.002025] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1697.002278] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1697.003069] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa246cac-c272-445d-81e0-8f78bf7903d1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.014915] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1697.014915] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52402d18-596f-c95c-eb26-9f4f84774134" [ 1697.014915] env[62508]: _type = "Task" [ 1697.014915] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.024755] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52402d18-596f-c95c-eb26-9f4f84774134, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.105198] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c79bec-56f5-4373-8778-34a9d84b510b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.115361] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a44b3b-f7c7-4c40-a4b1-0c506247b355 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.149684] env[62508]: INFO nova.compute.manager [-] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Took 1.50 seconds to deallocate network for instance. 
[ 1697.152574] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253c9afa-1384-4f50-a99e-7f649dc6be2b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.166242] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193ef73d-88b3-4a84-97a4-bdf8b0a26fb0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.176289] env[62508]: DEBUG oslo_concurrency.lockutils [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1697.176586] env[62508]: DEBUG oslo_concurrency.lockutils [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1697.192371] env[62508]: DEBUG nova.compute.provider_tree [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1697.351079] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776392, 'name': Rename_Task, 'duration_secs': 0.170975} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.351453] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1697.351959] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ada87195-6edb-4011-8eb4-1eb77fb0d1a1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.362059] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1697.362059] env[62508]: value = "task-1776395" [ 1697.362059] env[62508]: _type = "Task" [ 1697.362059] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.373413] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776395, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.460287] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "24091abb-f71f-4528-8fc5-b97725cf079e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1697.460379] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "24091abb-f71f-4528-8fc5-b97725cf079e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1697.460614] env[62508]: INFO nova.compute.manager [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Attaching volume 08bd2c21-2d3a-43bb-988d-08d5d2dfa691 to /dev/sdb [ 1697.476556] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776394, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50679} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.477174] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 2a564423-a2fd-4873-885f-37777bdd83eb/2a564423-a2fd-4873-885f-37777bdd83eb.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1697.477174] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1697.477447] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2183d693-b4e9-4178-ab8a-8ca42efdba96 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.484475] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1697.484475] env[62508]: value = "task-1776396" [ 1697.484475] env[62508]: _type = "Task" [ 1697.484475] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.496029] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1697.497271] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776396, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.500500] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b03c6c0-f564-469c-866a-1056398453af {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.510216] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bec7d21-37ea-4dc8-b2f7-ba282dbbc78c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.527114] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52402d18-596f-c95c-eb26-9f4f84774134, 'name': SearchDatastore_Task, 'duration_secs': 0.01163} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.532027] env[62508]: DEBUG nova.virt.block_device [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Updating existing volume attachment record: 00c1e999-78b7-4167-9bdf-88cf54ccdd0b {{(pid=62508) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1697.534533] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2017cc79-85de-4145-bbdb-703c1aa0c908 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.541740] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1697.541740] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b44a7d-15e0-9e98-0110-1684bbfd56ab" [ 1697.541740] env[62508]: _type = "Task" [ 1697.541740] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.554028] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b44a7d-15e0-9e98-0110-1684bbfd56ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.663760] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1697.679575] env[62508]: DEBUG nova.compute.manager [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1697.697096] env[62508]: DEBUG nova.scheduler.client.report [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1697.709038] env[62508]: DEBUG nova.compute.manager [req-c4e573c3-9ee9-46bf-96a1-1d89f4fb703f req-deddef50-5e0d-44b4-9fad-619c5e559462 service nova] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Received event network-vif-deleted-97e5cf3e-babc-43e2-8c55-e5f37f2ed5ad {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1697.873667] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776395, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.001134] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776396, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084276} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.001134] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.001134] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1698.001134] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d813e43-5f6d-4307-9e74-f16c5c33c604 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.028862] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 2a564423-a2fd-4873-885f-37777bdd83eb/2a564423-a2fd-4873-885f-37777bdd83eb.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1698.028862] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17798683-1913-4afe-8730-266d8662e3d5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.061021] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b44a7d-15e0-9e98-0110-1684bbfd56ab, 'name': SearchDatastore_Task, 'duration_secs': 0.011247} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.061021] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1698.061525] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk. 
{{(pid=62508) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1698.061626] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1698.061626] env[62508]: value = "task-1776400" [ 1698.061626] env[62508]: _type = "Task" [ 1698.061626] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.061800] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bdbe7350-47bb-4fdc-b1bb-c59e74d530b2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.072579] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1698.072579] env[62508]: value = "task-1776401" [ 1698.072579] env[62508]: _type = "Task" [ 1698.072579] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.076021] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776400, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.087117] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776401, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.204518] env[62508]: DEBUG oslo_concurrency.lockutils [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.205277] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.523s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1698.205832] env[62508]: DEBUG nova.compute.manager [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1698.209751] env[62508]: DEBUG oslo_concurrency.lockutils [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 7.801s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1698.209751] env[62508]: DEBUG nova.objects.instance [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62508) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1698.373208] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776395, 'name': PowerOnVM_Task, 'duration_secs': 0.621743} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.373649] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1698.374508] env[62508]: INFO nova.compute.manager [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Took 8.17 seconds to spawn the instance on the hypervisor. [ 1698.374508] env[62508]: DEBUG nova.compute.manager [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1698.375032] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91fc008-f3dc-418f-9915-de5e2b5a86e3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.575443] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776400, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.589620] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776401, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.716925] env[62508]: DEBUG nova.compute.utils [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1698.718526] env[62508]: DEBUG nova.compute.manager [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1698.718669] env[62508]: DEBUG nova.network.neutron [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1698.773322] env[62508]: DEBUG nova.policy [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b8c2247de96403f842862d8ec7960db', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c8a9a71f05d4991a6dfaa8aed156e84', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1698.893436] env[62508]: INFO nova.compute.manager [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Took 15.02 seconds to build instance. [ 1699.076575] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776400, 'name': ReconfigVM_Task, 'duration_secs': 0.894604} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.076862] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 2a564423-a2fd-4873-885f-37777bdd83eb/2a564423-a2fd-4873-885f-37777bdd83eb.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1699.078606] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c72864c5-0b7b-4293-a017-5245e2f34bed {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.091104] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776401, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.816415} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.092759] env[62508]: INFO nova.virt.vmwareapi.ds_util [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk. [ 1699.092759] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1699.092759] env[62508]: value = "task-1776402" [ 1699.092759] env[62508]: _type = "Task" [ 1699.092759] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.097116] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d7dd89-be47-40a0-ac52-8388fa37f0ad {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.105707] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776402, 'name': Rename_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.129532] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1699.130546] env[62508]: DEBUG nova.network.neutron [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Successfully created port: 5c6d20a1-1c14-4874-b295-9828a9172d8d {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1699.132393] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-194b4fff-def8-4ae0-ac6c-325e0f2b472a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.152683] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1699.152683] env[62508]: value = "task-1776403" [ 1699.152683] env[62508]: _type = "Task" [ 1699.152683] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.163686] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776403, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.220209] env[62508]: DEBUG oslo_concurrency.lockutils [None req-520fd46b-78e0-41b0-a3ef-26320dbd06fb tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.221443] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 6.015s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.226032] env[62508]: DEBUG nova.compute.manager [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1699.396751] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "9bafe786-5815-4871-8405-558cac7b3654" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.531s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.606367] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776402, 'name': Rename_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.663138] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776403, 'name': ReconfigVM_Task, 'duration_secs': 0.41857} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.663436] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Reconfigured VM instance instance-00000038 to attach disk [datastore1] aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7-rescue.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1699.664326] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495a825a-0464-4646-9380-cf5bfe0ed627 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.693363] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c094c5fa-3dc7-47a4-a9c2-8dd6eebc385b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.711121] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1699.711121] env[62508]: value = "task-1776404" [ 1699.711121] env[62508]: _type = "Task" [ 1699.711121] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.721524] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776404, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.744046] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e18502d6-1226-4a32-bfaf-313825d8e36e tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "interface-a058273e-9c68-4d73-9149-ceb60c1c1cda-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.744046] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e18502d6-1226-4a32-bfaf-313825d8e36e tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "interface-a058273e-9c68-4d73-9149-ceb60c1c1cda-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.744287] env[62508]: DEBUG nova.objects.instance [None req-e18502d6-1226-4a32-bfaf-313825d8e36e tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lazy-loading 'flavor' on Instance uuid a058273e-9c68-4d73-9149-ceb60c1c1cda {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1700.013850] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2042992a-73be-4242-9a00-c1c919f93a18 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.023140] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f37a680-ae1f-4f25-899e-32c6ec41aa55 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.055270] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b845ffa2-b3d3-47f3-a63c-2a15860efddf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.063741] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0008976-9c71-455e-b9cb-9dc81945e6e2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.079059] env[62508]: DEBUG nova.compute.provider_tree [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1700.110041] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776402, 'name': Rename_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.226460] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776404, 'name': ReconfigVM_Task, 'duration_secs': 0.189954} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.226823] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1700.227127] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-36112b8b-4113-459b-9628-47acd64682e2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.235389] env[62508]: DEBUG nova.compute.manager [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1700.237634] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1700.237634] env[62508]: value = "task-1776406" [ 1700.237634] env[62508]: _type = "Task" [ 1700.237634] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.248758] env[62508]: DEBUG nova.objects.instance [None req-e18502d6-1226-4a32-bfaf-313825d8e36e tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lazy-loading 'pci_requests' on Instance uuid a058273e-9c68-4d73-9149-ceb60c1c1cda {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1700.249831] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776406, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.270262] env[62508]: DEBUG nova.virt.hardware [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1700.270629] env[62508]: DEBUG nova.virt.hardware [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1700.270629] env[62508]: DEBUG nova.virt.hardware [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1700.270819] env[62508]: DEBUG nova.virt.hardware [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1700.270984] env[62508]: DEBUG nova.virt.hardware [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1700.271298] env[62508]: DEBUG nova.virt.hardware [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1700.273034] env[62508]: DEBUG nova.virt.hardware [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1700.273034] env[62508]: DEBUG nova.virt.hardware [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1700.273272] 
env[62508]: DEBUG nova.virt.hardware [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1700.273417] env[62508]: DEBUG nova.virt.hardware [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1700.273619] env[62508]: DEBUG nova.virt.hardware [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1700.275639] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d95b0ec-eb26-4b53-aded-36429e26180d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.286084] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe8304e-ba72-43ef-9a9a-fc42e2db9a80 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.345584] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "4d24bacc-48c4-4649-bb29-fcae2cf77782" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.345870] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "4d24bacc-48c4-4649-bb29-fcae2cf77782" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.346100] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "4d24bacc-48c4-4649-bb29-fcae2cf77782-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.346288] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "4d24bacc-48c4-4649-bb29-fcae2cf77782-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.346457] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 
tempest-ServerActionsTestJSON-1490369560-project-member] Lock "4d24bacc-48c4-4649-bb29-fcae2cf77782-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.348645] env[62508]: INFO nova.compute.manager [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Terminating instance [ 1700.350368] env[62508]: DEBUG nova.compute.manager [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1700.350560] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1700.351391] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61210709-2604-41e8-914b-e0378c23de79 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.359704] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1700.359944] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b15a79c3-dccd-4499-96fe-91847c230143 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.367840] env[62508]: DEBUG oslo_vmware.api [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1700.367840] env[62508]: value = "task-1776407" [ 1700.367840] env[62508]: _type = "Task" [ 1700.367840] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.379569] env[62508]: DEBUG oslo_vmware.api [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776407, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.603723] env[62508]: ERROR nova.scheduler.client.report [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [req-e5bdbe50-1ef3-457f-a6aa-5d00a68b7f87] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e5bdbe50-1ef3-457f-a6aa-5d00a68b7f87"}]} [ 1700.613402] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776402, 'name': Rename_Task, 'duration_secs': 1.345234} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.613779] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1700.614142] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dbfd5773-98c8-41fa-ab31-a7c254607b3d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.624774] env[62508]: DEBUG nova.scheduler.client.report [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1700.627429] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1700.627429] env[62508]: value = "task-1776408" [ 1700.627429] env[62508]: _type = "Task" [ 1700.627429] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.637282] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776408, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.640297] env[62508]: DEBUG nova.scheduler.client.report [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1700.640723] env[62508]: DEBUG nova.compute.provider_tree [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1700.654894] env[62508]: DEBUG nova.scheduler.client.report [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1700.679620] env[62508]: DEBUG nova.scheduler.client.report [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1700.695198] env[62508]: DEBUG nova.compute.manager [req-2a715736-9544-4d14-8ff7-9d377d922ebc req-bc2bb748-0147-4fb6-a55f-d316ccfcd70e service nova] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Received event network-vif-plugged-5c6d20a1-1c14-4874-b295-9828a9172d8d {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1700.698756] env[62508]: DEBUG oslo_concurrency.lockutils [req-2a715736-9544-4d14-8ff7-9d377d922ebc req-bc2bb748-0147-4fb6-a55f-d316ccfcd70e service nova] Acquiring lock "806102ec-7622-4770-91c9-8c5723893dec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.698756] env[62508]: DEBUG oslo_concurrency.lockutils [req-2a715736-9544-4d14-8ff7-9d377d922ebc req-bc2bb748-0147-4fb6-a55f-d316ccfcd70e service nova] Lock "806102ec-7622-4770-91c9-8c5723893dec-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.698756] env[62508]: DEBUG oslo_concurrency.lockutils [req-2a715736-9544-4d14-8ff7-9d377d922ebc req-bc2bb748-0147-4fb6-a55f-d316ccfcd70e service nova] Lock "806102ec-7622-4770-91c9-8c5723893dec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.698756] env[62508]: DEBUG nova.compute.manager [req-2a715736-9544-4d14-8ff7-9d377d922ebc req-bc2bb748-0147-4fb6-a55f-d316ccfcd70e service nova] [instance: 806102ec-7622-4770-91c9-8c5723893dec] No waiting events found dispatching network-vif-plugged-5c6d20a1-1c14-4874-b295-9828a9172d8d {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1700.698756] env[62508]: WARNING nova.compute.manager [req-2a715736-9544-4d14-8ff7-9d377d922ebc req-bc2bb748-0147-4fb6-a55f-d316ccfcd70e service nova] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Received unexpected event network-vif-plugged-5c6d20a1-1c14-4874-b295-9828a9172d8d for instance with vm_state building and task_state spawning. [ 1700.736544] env[62508]: DEBUG nova.network.neutron [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Successfully updated port: 5c6d20a1-1c14-4874-b295-9828a9172d8d {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1700.750110] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776406, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.753985] env[62508]: DEBUG nova.objects.base [None req-e18502d6-1226-4a32-bfaf-313825d8e36e tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1700.754207] env[62508]: DEBUG nova.network.neutron [None req-e18502d6-1226-4a32-bfaf-313825d8e36e tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1700.844445] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e18502d6-1226-4a32-bfaf-313825d8e36e tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "interface-a058273e-9c68-4d73-9149-ceb60c1c1cda-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.100s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.882935] env[62508]: DEBUG oslo_vmware.api [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776407, 'name': PowerOffVM_Task, 'duration_secs': 0.286015} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.886102] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1700.886308] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1700.886987] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fd60eff9-abdd-40e3-84df-a3ce715e9f84 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.033778] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ecba41-84cf-43af-a8f2-61aeb7bb1887 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.042665] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342bb8cd-9c0d-44da-9823-09a696f3e450 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.075706] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46b22a3-b0fe-4599-a292-e2789dedc7a4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.084134] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaae30dd-2846-4d06-8634-7fb6207481b4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.101983] env[62508]: DEBUG nova.compute.provider_tree [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1701.133407] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1701.133779] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Deleting contents of the VM 
from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1701.133999] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Deleting the datastore file [datastore1] 4d24bacc-48c4-4649-bb29-fcae2cf77782 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1701.134695] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c076b6c-88fa-4069-b952-7f1dfcf20fcd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.140728] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776408, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.143230] env[62508]: DEBUG oslo_vmware.api [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1701.143230] env[62508]: value = "task-1776410" [ 1701.143230] env[62508]: _type = "Task" [ 1701.143230] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.152685] env[62508]: DEBUG oslo_vmware.api [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776410, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.240744] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Acquiring lock "refresh_cache-806102ec-7622-4770-91c9-8c5723893dec" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1701.240938] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Acquired lock "refresh_cache-806102ec-7622-4770-91c9-8c5723893dec" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1701.241058] env[62508]: DEBUG nova.network.neutron [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1701.261763] env[62508]: DEBUG oslo_vmware.api [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776406, 'name': PowerOnVM_Task, 'duration_secs': 0.522408} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.262681] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1701.265272] env[62508]: DEBUG nova.compute.manager [None req-1d619445-7384-46d1-8878-27b2f51df9b8 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1701.266164] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fce3892-3ebf-4437-a676-5ab9c49fd34b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.638595] env[62508]: DEBUG nova.scheduler.client.report [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 132 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1701.639033] env[62508]: DEBUG nova.compute.provider_tree [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 132 to 133 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1701.639339] env[62508]: DEBUG nova.compute.provider_tree [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1701.653646] env[62508]: DEBUG oslo_vmware.api [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776408, 'name': PowerOnVM_Task, 'duration_secs': 0.771279} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.654946] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1701.654946] env[62508]: INFO nova.compute.manager [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Took 8.98 seconds to spawn the instance on the hypervisor. [ 1701.655060] env[62508]: DEBUG nova.compute.manager [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1701.656649] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e998017-ba46-4fd0-bb57-4756e48cd9bb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.665043] env[62508]: DEBUG oslo_vmware.api [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776410, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24826} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.665635] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1701.665825] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1701.665994] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1701.666183] env[62508]: INFO nova.compute.manager [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Took 1.32 seconds to destroy the instance on the hypervisor. [ 1701.666417] env[62508]: DEBUG oslo.service.loopingcall [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1701.666888] env[62508]: DEBUG nova.compute.manager [-] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1701.666985] env[62508]: DEBUG nova.network.neutron [-] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1701.799110] env[62508]: DEBUG nova.network.neutron [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1702.038657] env[62508]: DEBUG nova.network.neutron [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Updating instance_info_cache with network_info: [{"id": "5c6d20a1-1c14-4874-b295-9828a9172d8d", "address": "fa:16:3e:33:a8:e4", "network": {"id": "472bd29e-33d3-4fb0-915b-13269b954453", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1601635694-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c8a9a71f05d4991a6dfaa8aed156e84", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cde23701-02ca-4cb4-b5a6-d321f8ac9660", "external-id": "nsx-vlan-transportzone-586", "segmentation_id": 586, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c6d20a1-1c", "ovs_interfaceid": "5c6d20a1-1c14-4874-b295-9828a9172d8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1702.083874] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Volume attach. 
Driver type: vmdk {{(pid=62508) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1702.084127] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368783', 'volume_id': '08bd2c21-2d3a-43bb-988d-08d5d2dfa691', 'name': 'volume-08bd2c21-2d3a-43bb-988d-08d5d2dfa691', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '24091abb-f71f-4528-8fc5-b97725cf079e', 'attached_at': '', 'detached_at': '', 'volume_id': '08bd2c21-2d3a-43bb-988d-08d5d2dfa691', 'serial': '08bd2c21-2d3a-43bb-988d-08d5d2dfa691'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1702.085040] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c7358a6-0f67-4509-a5e3-ff03d1bc16a4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.102053] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11535ba9-0a98-4291-a2bb-5f29ad76d222 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.128017] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] volume-08bd2c21-2d3a-43bb-988d-08d5d2dfa691/volume-08bd2c21-2d3a-43bb-988d-08d5d2dfa691.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1702.128671] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9134541a-7eca-4e94-9a4f-16a1cddaa789 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.147699] env[62508]: DEBUG oslo_vmware.api [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1702.147699] env[62508]: value = "task-1776411" [ 1702.147699] env[62508]: _type = "Task" [ 1702.147699] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.159472] env[62508]: DEBUG oslo_vmware.api [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776411, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.182121] env[62508]: INFO nova.compute.manager [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Took 18.25 seconds to build instance. 
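The records above show the same pattern several times: an operation such as ReconfigVM_Task or PowerOnVM_Task is submitted to vCenter, and oslo_vmware.api then polls it ("Waiting for the task ... to complete", "progress is 5%/14%/100%", "completed successfully"). Below is a minimal Python sketch of that poll-until-done pattern, included only as an illustration of what the log is tracing; it is not oslo.vmware's actual implementation, and get_task_info is a hypothetical stand-in for the PropertyCollector round-trip that each poll performs.

    import time

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
        """Poll a vCenter-style task until it succeeds, fails, or times out."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()  # e.g. {'state': 'running', 'progress': 14}
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(f"task failed: {info.get('error')}")
            # log lines such as "progress is 14%" correspond to this branch
            time.sleep(poll_interval)
        raise TimeoutError("task did not complete in time")

    # Usage sketch with canned task states (purely illustrative):
    # states = iter([{'state': 'running', 'progress': 5},
    #                {'state': 'running', 'progress': 14},
    #                {'state': 'success', 'result': 'reconfigured'}])
    # wait_for_task(lambda: next(states), poll_interval=0)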
[ 1702.291032] env[62508]: INFO nova.compute.manager [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Unrescuing [ 1702.291393] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1702.291622] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquired lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1702.291857] env[62508]: DEBUG nova.network.neutron [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1702.540546] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Releasing lock "refresh_cache-806102ec-7622-4770-91c9-8c5723893dec" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1702.545023] env[62508]: DEBUG nova.compute.manager [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Instance network_info: |[{"id": "5c6d20a1-1c14-4874-b295-9828a9172d8d", "address": "fa:16:3e:33:a8:e4", "network": {"id": "472bd29e-33d3-4fb0-915b-13269b954453", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1601635694-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c8a9a71f05d4991a6dfaa8aed156e84", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cde23701-02ca-4cb4-b5a6-d321f8ac9660", "external-id": "nsx-vlan-transportzone-586", "segmentation_id": 586, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c6d20a1-1c", "ovs_interfaceid": "5c6d20a1-1c14-4874-b295-9828a9172d8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1702.545023] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 
tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:a8:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cde23701-02ca-4cb4-b5a6-d321f8ac9660', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c6d20a1-1c14-4874-b295-9828a9172d8d', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1702.555033] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Creating folder: Project (9c8a9a71f05d4991a6dfaa8aed156e84). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1702.556404] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9f356e5-2099-4037-89ea-bc98c66204b8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.569703] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Created folder: Project (9c8a9a71f05d4991a6dfaa8aed156e84) in parent group-v368536. [ 1702.569957] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Creating folder: Instances. Parent ref: group-v368784. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1702.570217] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28812b34-5f90-4241-94ed-890844031a8f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.581566] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Created folder: Instances in parent group-v368784. [ 1702.582573] env[62508]: DEBUG oslo.service.loopingcall [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1702.582573] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1702.582573] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d2af4afd-4d6f-40b3-b4cc-109b1958bfb0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.603445] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1702.603445] env[62508]: value = "task-1776414" [ 1702.603445] env[62508]: _type = "Task" [ 1702.603445] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.613479] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776414, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.658743] env[62508]: DEBUG oslo_vmware.api [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776411, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.662216] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.441s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.662429] env[62508]: DEBUG nova.compute.manager [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. {{(pid=62508) _confirm_resize /opt/stack/nova/nova/compute/manager.py:4910}} [ 1702.665368] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.228s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.665695] env[62508]: DEBUG nova.objects.instance [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lazy-loading 'resources' on Instance uuid e875f30e-2c25-46a4-8c74-36f08e7eb982 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1702.686076] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c263d11-3905-4940-b006-c79a966a3bcc tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "2a564423-a2fd-4873-885f-37777bdd83eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.768s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.773437] env[62508]: DEBUG nova.network.neutron [-] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1702.855346] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "interface-a058273e-9c68-4d73-9149-ceb60c1c1cda-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.855618] env[62508]: DEBUG 
oslo_concurrency.lockutils [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "interface-a058273e-9c68-4d73-9149-ceb60c1c1cda-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.855950] env[62508]: DEBUG nova.objects.instance [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lazy-loading 'flavor' on Instance uuid a058273e-9c68-4d73-9149-ceb60c1c1cda {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1702.940035] env[62508]: DEBUG oslo_concurrency.lockutils [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "9bafe786-5815-4871-8405-558cac7b3654" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.940487] env[62508]: DEBUG oslo_concurrency.lockutils [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "9bafe786-5815-4871-8405-558cac7b3654" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.940708] env[62508]: DEBUG oslo_concurrency.lockutils [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "9bafe786-5815-4871-8405-558cac7b3654-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.940892] env[62508]: DEBUG oslo_concurrency.lockutils [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "9bafe786-5815-4871-8405-558cac7b3654-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.941070] env[62508]: DEBUG oslo_concurrency.lockutils [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "9bafe786-5815-4871-8405-558cac7b3654-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.943215] env[62508]: INFO nova.compute.manager [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Terminating instance [ 1702.945827] env[62508]: DEBUG nova.compute.manager [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 
tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1702.946073] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1702.946999] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40777ce5-63ef-41e1-bc9b-4ceacb51b7d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.958283] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1702.958585] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c07753b-643c-46a6-bf09-d828d607b636 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.966140] env[62508]: DEBUG oslo_vmware.api [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1702.966140] env[62508]: value = "task-1776415" [ 1702.966140] env[62508]: _type = "Task" [ 1702.966140] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.976085] env[62508]: DEBUG oslo_vmware.api [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776415, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.041442] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "2a564423-a2fd-4873-885f-37777bdd83eb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.041821] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "2a564423-a2fd-4873-885f-37777bdd83eb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1703.042046] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "2a564423-a2fd-4873-885f-37777bdd83eb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.042254] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "2a564423-a2fd-4873-885f-37777bdd83eb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1703.042428] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "2a564423-a2fd-4873-885f-37777bdd83eb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.044738] env[62508]: INFO nova.compute.manager [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Terminating instance [ 1703.046792] env[62508]: DEBUG nova.compute.manager [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1703.047010] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1703.048155] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd58c98-715f-4136-b6b3-d0ce9152fe81 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.057056] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1703.058239] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-decc2568-70af-4988-ac9c-741ba3b66269 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.066154] env[62508]: DEBUG oslo_vmware.api [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1703.066154] env[62508]: value = "task-1776416" [ 1703.066154] env[62508]: _type = "Task" [ 1703.066154] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.079599] env[62508]: DEBUG oslo_vmware.api [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776416, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.115887] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776414, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.161069] env[62508]: DEBUG oslo_vmware.api [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776411, 'name': ReconfigVM_Task, 'duration_secs': 0.781865} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.161371] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Reconfigured VM instance instance-00000050 to attach disk [datastore1] volume-08bd2c21-2d3a-43bb-988d-08d5d2dfa691/volume-08bd2c21-2d3a-43bb-988d-08d5d2dfa691.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1703.166499] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e655498-b0ea-4da9-95e2-ee7130393e7d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.186125] env[62508]: DEBUG nova.compute.manager [req-adf42d3c-400c-4f7a-92fd-cbf6c9f45708 req-75af2c56-2d5b-4b9c-8d70-61d7a48c6108 service nova] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Received event network-changed-5c6d20a1-1c14-4874-b295-9828a9172d8d {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1703.186316] env[62508]: DEBUG nova.compute.manager [req-adf42d3c-400c-4f7a-92fd-cbf6c9f45708 req-75af2c56-2d5b-4b9c-8d70-61d7a48c6108 service nova] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Refreshing instance network info cache due to event network-changed-5c6d20a1-1c14-4874-b295-9828a9172d8d. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1703.186524] env[62508]: DEBUG oslo_concurrency.lockutils [req-adf42d3c-400c-4f7a-92fd-cbf6c9f45708 req-75af2c56-2d5b-4b9c-8d70-61d7a48c6108 service nova] Acquiring lock "refresh_cache-806102ec-7622-4770-91c9-8c5723893dec" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1703.186665] env[62508]: DEBUG oslo_concurrency.lockutils [req-adf42d3c-400c-4f7a-92fd-cbf6c9f45708 req-75af2c56-2d5b-4b9c-8d70-61d7a48c6108 service nova] Acquired lock "refresh_cache-806102ec-7622-4770-91c9-8c5723893dec" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1703.186871] env[62508]: DEBUG nova.network.neutron [req-adf42d3c-400c-4f7a-92fd-cbf6c9f45708 req-75af2c56-2d5b-4b9c-8d70-61d7a48c6108 service nova] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Refreshing network info cache for port 5c6d20a1-1c14-4874-b295-9828a9172d8d {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1703.190841] env[62508]: DEBUG nova.network.neutron [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Updating instance_info_cache with network_info: [{"id": "458dc468-1ae9-4f09-b0e2-4c866362fb80", "address": "fa:16:3e:f7:69:71", "network": {"id": "1469693f-972e-4bc4-8302-f159fa7e79b8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1766475744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27e6f55b56be40d2a619f0119aefb2ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap458dc468-1a", "ovs_interfaceid": "458dc468-1ae9-4f09-b0e2-4c866362fb80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1703.195747] env[62508]: DEBUG oslo_vmware.api [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1703.195747] env[62508]: value = "task-1776417" [ 1703.195747] env[62508]: _type = "Task" [ 1703.195747] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.209972] env[62508]: DEBUG oslo_vmware.api [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776417, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.254407] env[62508]: INFO nova.scheduler.client.report [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Deleted allocation for migration 3dac5208-b604-4108-b35a-0de7840810dc [ 1703.276252] env[62508]: INFO nova.compute.manager [-] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Took 1.61 seconds to deallocate network for instance. [ 1703.466792] env[62508]: DEBUG nova.objects.instance [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lazy-loading 'pci_requests' on Instance uuid a058273e-9c68-4d73-9149-ceb60c1c1cda {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1703.480379] env[62508]: DEBUG oslo_vmware.api [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776415, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.504702] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-672d2c93-8046-4fab-ab51-e2d639a775df {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.512919] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94786823-69f2-4c0d-8fdc-87bbfef887ff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.544841] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b041e5-b6ff-43da-899a-31a744432b1b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.553517] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba4a358-f1d4-44b7-be5b-0b51504021b2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.569973] env[62508]: DEBUG nova.compute.provider_tree [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1703.581970] env[62508]: DEBUG oslo_vmware.api [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776416, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.616959] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776414, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.693301] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Releasing lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1703.693891] env[62508]: DEBUG nova.objects.instance [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lazy-loading 'flavor' on Instance uuid aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1703.707808] env[62508]: DEBUG oslo_vmware.api [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776417, 'name': ReconfigVM_Task, 'duration_secs': 0.177851} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.707953] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368783', 'volume_id': '08bd2c21-2d3a-43bb-988d-08d5d2dfa691', 'name': 'volume-08bd2c21-2d3a-43bb-988d-08d5d2dfa691', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '24091abb-f71f-4528-8fc5-b97725cf079e', 'attached_at': '', 'detached_at': '', 'volume_id': '08bd2c21-2d3a-43bb-988d-08d5d2dfa691', 'serial': '08bd2c21-2d3a-43bb-988d-08d5d2dfa691'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1703.761888] env[62508]: DEBUG oslo_concurrency.lockutils [None req-0a4d5863-305d-4613-9095-0cc1a6021e0d tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 13.614s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.785371] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.958785] env[62508]: DEBUG nova.network.neutron [req-adf42d3c-400c-4f7a-92fd-cbf6c9f45708 req-75af2c56-2d5b-4b9c-8d70-61d7a48c6108 service nova] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Updated VIF entry in instance network info cache for port 5c6d20a1-1c14-4874-b295-9828a9172d8d. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1703.959287] env[62508]: DEBUG nova.network.neutron [req-adf42d3c-400c-4f7a-92fd-cbf6c9f45708 req-75af2c56-2d5b-4b9c-8d70-61d7a48c6108 service nova] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Updating instance_info_cache with network_info: [{"id": "5c6d20a1-1c14-4874-b295-9828a9172d8d", "address": "fa:16:3e:33:a8:e4", "network": {"id": "472bd29e-33d3-4fb0-915b-13269b954453", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1601635694-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c8a9a71f05d4991a6dfaa8aed156e84", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cde23701-02ca-4cb4-b5a6-d321f8ac9660", "external-id": "nsx-vlan-transportzone-586", "segmentation_id": 586, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c6d20a1-1c", "ovs_interfaceid": "5c6d20a1-1c14-4874-b295-9828a9172d8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1703.975039] env[62508]: DEBUG nova.objects.base [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1703.975280] env[62508]: DEBUG nova.network.neutron [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1703.980527] env[62508]: DEBUG oslo_vmware.api [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776415, 'name': PowerOffVM_Task, 'duration_secs': 0.864107} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.981069] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1703.981248] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1703.981515] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eae921ad-5fe8-4ad6-b4fd-7f26c1a51789 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.052680] env[62508]: DEBUG nova.policy [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c1793957cc840d58a1b6f1f9b38b96b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b46df14344794f29a8b0c00408d18159', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1704.077382] env[62508]: DEBUG nova.scheduler.client.report [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1704.083682] env[62508]: DEBUG oslo_vmware.api [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776416, 'name': PowerOffVM_Task, 'duration_secs': 0.764275} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.084153] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1704.084327] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1704.084572] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ad2db0d-3b5b-4b42-b53c-7aea39395082 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.117904] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776414, 'name': CreateVM_Task, 'duration_secs': 1.247056} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.118206] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1704.118920] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.119688] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.119688] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1704.119688] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29e6dc17-adcb-4de4-aab1-da0ab641bb39 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.126284] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Waiting for the task: (returnval){ [ 1704.126284] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52be60c3-29c0-d835-a4ca-4fabde286a71" [ 1704.126284] env[62508]: _type = "Task" [ 1704.126284] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.134065] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1704.134333] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1704.134649] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Deleting the datastore file [datastore1] 9bafe786-5815-4871-8405-558cac7b3654 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1704.135042] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b137c0bc-3c9a-45f1-93cc-9dd4b6264a9d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.141830] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52be60c3-29c0-d835-a4ca-4fabde286a71, 'name': SearchDatastore_Task, 'duration_secs': 0.012327} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.142717] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1704.142990] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1704.143346] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.143501] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.143706] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1704.144050] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e9aa28e-e68f-4321-822c-06891161b9c5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.148126] env[62508]: DEBUG oslo_vmware.api [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1704.148126] env[62508]: value = "task-1776420" [ 1704.148126] env[62508]: _type = "Task" [ 1704.148126] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.154510] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1704.154649] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1704.158141] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7deeb112-25eb-4662-80d9-b984b905193a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.160467] env[62508]: DEBUG oslo_vmware.api [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776420, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.164266] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Waiting for the task: (returnval){ [ 1704.164266] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52cb71fb-0909-06cb-9fc2-dd996f66aa30" [ 1704.164266] env[62508]: _type = "Task" [ 1704.164266] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.175465] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52cb71fb-0909-06cb-9fc2-dd996f66aa30, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.200457] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8f5d96-b36d-449a-9f2b-1ea9409eb041 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.228875] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1704.229746] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-01d8a302-9791-49c5-9e74-2d0cba8b25ed {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.238036] env[62508]: DEBUG oslo_vmware.api [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1704.238036] env[62508]: value = "task-1776421" [ 1704.238036] env[62508]: _type = "Task" [ 1704.238036] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.247741] env[62508]: DEBUG oslo_vmware.api [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776421, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.251349] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1704.251541] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1704.251735] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Deleting the datastore file [datastore1] 2a564423-a2fd-4873-885f-37777bdd83eb {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1704.252025] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b5d43047-055a-4e13-9b47-630ce195427d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.260711] env[62508]: DEBUG oslo_vmware.api [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1704.260711] env[62508]: value = "task-1776422" [ 1704.260711] env[62508]: _type = "Task" [ 1704.260711] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.275154] env[62508]: DEBUG oslo_vmware.api [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776422, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.298557] env[62508]: DEBUG nova.objects.instance [None req-c51f0bd3-8730-44e2-96fd-f644be7bddf7 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lazy-loading 'flavor' on Instance uuid e478855d-e9c7-4abc-8e22-a4b2eb0c7310 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1704.390759] env[62508]: DEBUG nova.network.neutron [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Successfully created port: 09990164-6bb3-4ed4-ba3a-f67204a82380 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1704.461498] env[62508]: DEBUG oslo_concurrency.lockutils [req-adf42d3c-400c-4f7a-92fd-cbf6c9f45708 req-75af2c56-2d5b-4b9c-8d70-61d7a48c6108 service nova] Releasing lock "refresh_cache-806102ec-7622-4770-91c9-8c5723893dec" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1704.462226] env[62508]: DEBUG nova.compute.manager [req-adf42d3c-400c-4f7a-92fd-cbf6c9f45708 req-75af2c56-2d5b-4b9c-8d70-61d7a48c6108 service nova] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Received event network-vif-deleted-c969ec53-5930-48a3-bad6-aaa89e2519c6 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1704.585796] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.920s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1704.588241] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.615s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.588779] env[62508]: DEBUG nova.objects.instance [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lazy-loading 'resources' on Instance uuid 73841c12-1ae9-46a5-bfe0-e0f82877667c {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1704.614648] env[62508]: INFO nova.scheduler.client.report [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Deleted allocations for instance e875f30e-2c25-46a4-8c74-36f08e7eb982 [ 1704.660693] env[62508]: DEBUG oslo_vmware.api [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776420, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173859} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.660990] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1704.663665] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1704.663665] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1704.663665] env[62508]: INFO nova.compute.manager [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Took 1.72 seconds to destroy the instance on the hypervisor. [ 1704.663665] env[62508]: DEBUG oslo.service.loopingcall [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1704.663665] env[62508]: DEBUG nova.compute.manager [-] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1704.663665] env[62508]: DEBUG nova.network.neutron [-] [instance: 9bafe786-5815-4871-8405-558cac7b3654] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1704.675380] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52cb71fb-0909-06cb-9fc2-dd996f66aa30, 'name': SearchDatastore_Task, 'duration_secs': 0.010862} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.676406] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8203337c-b151-485c-8f57-bc8e05146cd3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.682815] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Waiting for the task: (returnval){ [ 1704.682815] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527f845a-6ddd-3727-328a-9f9d1708cd4b" [ 1704.682815] env[62508]: _type = "Task" [ 1704.682815] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.691529] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527f845a-6ddd-3727-328a-9f9d1708cd4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.751054] env[62508]: DEBUG oslo_vmware.api [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776421, 'name': PowerOffVM_Task, 'duration_secs': 0.262675} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.751402] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1704.757232] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Reconfiguring VM instance instance-00000038 to detach disk 2002 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1704.757565] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1744ea4-be77-4d0c-8bba-86d30cb9ca78 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.780893] env[62508]: DEBUG oslo_vmware.api [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776422, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145636} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.782451] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1704.782701] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1704.782951] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1704.783182] env[62508]: INFO nova.compute.manager [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Took 1.74 seconds to destroy the instance on the hypervisor. [ 1704.783464] env[62508]: DEBUG oslo.service.loopingcall [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1704.784507] env[62508]: DEBUG nova.objects.instance [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lazy-loading 'flavor' on Instance uuid 24091abb-f71f-4528-8fc5-b97725cf079e {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1704.786082] env[62508]: DEBUG oslo_vmware.api [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1704.786082] env[62508]: value = "task-1776423" [ 1704.786082] env[62508]: _type = "Task" [ 1704.786082] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.786494] env[62508]: DEBUG nova.compute.manager [-] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1704.786627] env[62508]: DEBUG nova.network.neutron [-] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1704.800070] env[62508]: DEBUG oslo_vmware.api [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776423, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.806347] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c51f0bd3-8730-44e2-96fd-f644be7bddf7 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.806347] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c51f0bd3-8730-44e2-96fd-f644be7bddf7 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.806347] env[62508]: DEBUG nova.network.neutron [None req-c51f0bd3-8730-44e2-96fd-f644be7bddf7 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1704.806347] env[62508]: DEBUG nova.objects.instance [None req-c51f0bd3-8730-44e2-96fd-f644be7bddf7 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lazy-loading 'info_cache' on Instance uuid e478855d-e9c7-4abc-8e22-a4b2eb0c7310 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1704.943353] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b2bb5ad7-d8d6-4e52-bd54-d5b4eaa88cb4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "24091abb-f71f-4528-8fc5-b97725cf079e" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.123917] env[62508]: DEBUG oslo_concurrency.lockutils [None req-28bf7e27-a258-44cb-8eeb-bab7a4e68202 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "e875f30e-2c25-46a4-8c74-36f08e7eb982" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.684s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1705.196759] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527f845a-6ddd-3727-328a-9f9d1708cd4b, 'name': SearchDatastore_Task, 'duration_secs': 0.011192} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.199454] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1705.199757] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 806102ec-7622-4770-91c9-8c5723893dec/806102ec-7622-4770-91c9-8c5723893dec.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1705.200284] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-03031a46-17bb-4245-a57b-f86707a3217c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.207835] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Waiting for the task: (returnval){ [ 1705.207835] env[62508]: value = "task-1776424" [ 1705.207835] env[62508]: _type = "Task" [ 1705.207835] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.215446] env[62508]: DEBUG nova.compute.manager [req-ea7c7ae5-f3e5-4f31-a27a-68556f56db5d req-fcdd766d-b663-4a51-bf16-91073200440b service nova] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Received event network-vif-deleted-b772290b-95f4-40f5-ae90-2e483b44a33f {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1705.215632] env[62508]: INFO nova.compute.manager [req-ea7c7ae5-f3e5-4f31-a27a-68556f56db5d req-fcdd766d-b663-4a51-bf16-91073200440b service nova] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Neutron deleted interface b772290b-95f4-40f5-ae90-2e483b44a33f; detaching it from the instance and deleting it from the info cache [ 1705.215799] env[62508]: DEBUG nova.network.neutron [req-ea7c7ae5-f3e5-4f31-a27a-68556f56db5d req-fcdd766d-b663-4a51-bf16-91073200440b service nova] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.222804] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': task-1776424, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.276716] env[62508]: DEBUG nova.compute.manager [req-e98f2c0b-6b97-418e-a4fa-645c7d3625ca req-f13fd524-86fa-41d3-995c-4897150d73b6 service nova] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Received event network-vif-deleted-23e0a38a-6ba2-45bf-8535-495296b681ae {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1705.276924] env[62508]: INFO nova.compute.manager [req-e98f2c0b-6b97-418e-a4fa-645c7d3625ca req-f13fd524-86fa-41d3-995c-4897150d73b6 service nova] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Neutron deleted interface 23e0a38a-6ba2-45bf-8535-495296b681ae; detaching it from the instance and deleting it from the info cache [ 1705.277073] env[62508]: DEBUG nova.network.neutron [req-e98f2c0b-6b97-418e-a4fa-645c7d3625ca req-f13fd524-86fa-41d3-995c-4897150d73b6 service nova] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.289140] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1a8b8f9-3c72-44ab-9f18-2456c0cb59c3 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "24091abb-f71f-4528-8fc5-b97725cf079e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.829s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1705.290127] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b2bb5ad7-d8d6-4e52-bd54-d5b4eaa88cb4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "24091abb-f71f-4528-8fc5-b97725cf079e" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.347s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1705.290299] env[62508]: DEBUG nova.compute.manager [None req-b2bb5ad7-d8d6-4e52-bd54-d5b4eaa88cb4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1705.292020] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b34b6438-66c6-4441-81bf-dd674e0b0525 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.307406] env[62508]: DEBUG oslo_vmware.api [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776423, 'name': ReconfigVM_Task, 'duration_secs': 0.262946} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.310445] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Reconfigured VM instance instance-00000038 to detach disk 2002 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1705.310513] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1705.311807] env[62508]: DEBUG nova.objects.base [None req-c51f0bd3-8730-44e2-96fd-f644be7bddf7 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1705.313088] env[62508]: DEBUG nova.compute.manager [None req-b2bb5ad7-d8d6-4e52-bd54-d5b4eaa88cb4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62508) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1705.313638] env[62508]: DEBUG nova.objects.instance [None req-b2bb5ad7-d8d6-4e52-bd54-d5b4eaa88cb4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lazy-loading 'flavor' on Instance uuid 24091abb-f71f-4528-8fc5-b97725cf079e {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1705.317216] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56359d25-1e7a-4a33-ae77-d335b7fa6a71 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.329152] env[62508]: DEBUG oslo_vmware.api [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1705.329152] env[62508]: value = "task-1776425" [ 1705.329152] env[62508]: _type = "Task" [ 1705.329152] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.341755] env[62508]: DEBUG oslo_vmware.api [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776425, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.403399] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fef7c1a-48ef-43b7-88d0-ce06d466c190 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.412591] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb21ef86-da6d-45ee-a9d9-985968e25968 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.447214] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c989e37-aea4-45a1-a7f8-7731f83d3904 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.457424] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b0f6947-09b5-4355-988f-1802f1e3c486 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.474412] env[62508]: DEBUG nova.compute.provider_tree [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1705.521358] env[62508]: DEBUG nova.network.neutron [-] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.531446] env[62508]: DEBUG nova.network.neutron [-] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.719583] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': task-1776424, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.719903] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e5cdf992-8f7f-4fb7-97de-dcaff99c2636 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.729704] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e8bb4f-6054-4a5e-8916-a4118732b090 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.770978] env[62508]: DEBUG nova.compute.manager [req-ea7c7ae5-f3e5-4f31-a27a-68556f56db5d req-fcdd766d-b663-4a51-bf16-91073200440b service nova] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Detach interface failed, port_id=b772290b-95f4-40f5-ae90-2e483b44a33f, reason: Instance 2a564423-a2fd-4873-885f-37777bdd83eb could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1705.784021] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa6c9e89-d315-4a71-a642-1ffd4b4ebff2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.793905] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0997d612-adf4-4d9b-b7e3-c4568cea131f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.832868] env[62508]: DEBUG nova.compute.manager [req-e98f2c0b-6b97-418e-a4fa-645c7d3625ca req-f13fd524-86fa-41d3-995c-4897150d73b6 service nova] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Detach interface failed, port_id=23e0a38a-6ba2-45bf-8535-495296b681ae, reason: Instance 9bafe786-5815-4871-8405-558cac7b3654 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1705.833537] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2bb5ad7-d8d6-4e52-bd54-d5b4eaa88cb4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1705.833836] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f1bdd591-cb27-46d9-9659-447bc30c349a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.845506] env[62508]: DEBUG oslo_vmware.api [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776425, 'name': PowerOnVM_Task, 'duration_secs': 0.466509} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.846867] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1705.847222] env[62508]: DEBUG nova.compute.manager [None req-0553da4e-03b6-452c-8f1d-6526ff9b6c4c tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1705.847580] env[62508]: DEBUG oslo_vmware.api [None req-b2bb5ad7-d8d6-4e52-bd54-d5b4eaa88cb4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1705.847580] env[62508]: value = "task-1776426" [ 1705.847580] env[62508]: _type = "Task" [ 1705.847580] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.848317] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17883c0f-3433-4a49-a7b7-57eb8a9927c7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.861982] env[62508]: DEBUG oslo_vmware.api [None req-b2bb5ad7-d8d6-4e52-bd54-d5b4eaa88cb4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776426, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.882601] env[62508]: DEBUG oslo_concurrency.lockutils [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "9283494f-d8e2-4077-9e4d-57ee4786c3c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.882943] env[62508]: DEBUG oslo_concurrency.lockutils [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "9283494f-d8e2-4077-9e4d-57ee4786c3c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1705.883203] env[62508]: DEBUG oslo_concurrency.lockutils [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "9283494f-d8e2-4077-9e4d-57ee4786c3c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.883420] env[62508]: DEBUG oslo_concurrency.lockutils [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "9283494f-d8e2-4077-9e4d-57ee4786c3c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1705.883541] env[62508]: DEBUG oslo_concurrency.lockutils [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "9283494f-d8e2-4077-9e4d-57ee4786c3c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1705.885888] env[62508]: INFO nova.compute.manager [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Terminating instance [ 1705.887861] env[62508]: DEBUG nova.compute.manager [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1705.888061] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1705.889097] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d4105f-da2f-41cb-aca2-8cf082e9829b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.898474] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1705.899437] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c9f45100-c4b2-45c7-abc9-3c69de3ae22b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.910159] env[62508]: DEBUG oslo_vmware.api [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1705.910159] env[62508]: value = "task-1776427" [ 1705.910159] env[62508]: _type = "Task" [ 1705.910159] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.919774] env[62508]: DEBUG oslo_vmware.api [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776427, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.998253] env[62508]: ERROR nova.scheduler.client.report [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [req-2649cb7d-f09c-4769-b36a-d0909a7ced30] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2649cb7d-f09c-4769-b36a-d0909a7ced30"}]} [ 1706.019820] env[62508]: DEBUG nova.scheduler.client.report [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1706.024133] env[62508]: INFO nova.compute.manager [-] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Took 1.36 seconds to deallocate network for instance. [ 1706.034303] env[62508]: INFO nova.compute.manager [-] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Took 1.25 seconds to deallocate network for instance. [ 1706.036801] env[62508]: DEBUG nova.scheduler.client.report [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1706.037151] env[62508]: DEBUG nova.compute.provider_tree [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1706.060289] env[62508]: DEBUG nova.scheduler.client.report [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1706.078546] env[62508]: DEBUG nova.network.neutron [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Successfully updated port: 09990164-6bb3-4ed4-ba3a-f67204a82380 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1706.088400] env[62508]: DEBUG nova.network.neutron [None req-c51f0bd3-8730-44e2-96fd-f644be7bddf7 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] 
Updating instance_info_cache with network_info: [{"id": "ec6adf65-fbdf-4276-8e19-eb416336bbff", "address": "fa:16:3e:82:3c:01", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec6adf65-fb", "ovs_interfaceid": "ec6adf65-fbdf-4276-8e19-eb416336bbff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1706.093475] env[62508]: DEBUG nova.scheduler.client.report [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1706.220469] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': task-1776424, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.360332] env[62508]: DEBUG oslo_vmware.api [None req-b2bb5ad7-d8d6-4e52-bd54-d5b4eaa88cb4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776426, 'name': PowerOffVM_Task, 'duration_secs': 0.431371} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.362732] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2bb5ad7-d8d6-4e52-bd54-d5b4eaa88cb4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1706.363730] env[62508]: DEBUG nova.compute.manager [None req-b2bb5ad7-d8d6-4e52-bd54-d5b4eaa88cb4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1706.363868] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b865f0-32a3-42c8-97b6-e0820c286983 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.375010] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-449bd2d4-1259-41d7-ac2d-74125a0964ad {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.383139] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd5c7bfc-9667-40ac-bed1-429fa86e2986 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.419115] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f8564b-f4f5-4ea4-90ed-e9afe3b38d41 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.431263] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107c7644-c32a-4a4e-932c-17b01d3cf62d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.435345] env[62508]: DEBUG oslo_vmware.api [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776427, 'name': PowerOffVM_Task, 'duration_secs': 0.236272} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.435635] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1706.435823] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1706.436417] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-96ad30cc-7e28-46f0-86ae-a8d37a9985fc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.446256] env[62508]: DEBUG nova.compute.provider_tree [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1706.530795] env[62508]: DEBUG oslo_concurrency.lockutils [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.545129] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.555416] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1706.555708] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1706.555926] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 
tempest-ServersAdminTestJSON-1341054529-project-member] Deleting the datastore file [datastore1] 9283494f-d8e2-4077-9e4d-57ee4786c3c7 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1706.556225] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d90e2cc-db4f-4569-9b7f-fbdab30de99e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.563423] env[62508]: DEBUG oslo_vmware.api [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1706.563423] env[62508]: value = "task-1776429" [ 1706.563423] env[62508]: _type = "Task" [ 1706.563423] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.572009] env[62508]: DEBUG oslo_vmware.api [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776429, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.591257] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c51f0bd3-8730-44e2-96fd-f644be7bddf7 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "refresh_cache-e478855d-e9c7-4abc-8e22-a4b2eb0c7310" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1706.592797] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1706.592975] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1706.593184] env[62508]: DEBUG nova.network.neutron [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1706.628473] env[62508]: DEBUG oslo_concurrency.lockutils [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.628771] env[62508]: DEBUG oslo_concurrency.lockutils [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" 
acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1706.628960] env[62508]: INFO nova.compute.manager [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Shelving [ 1706.721842] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': task-1776424, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.456994} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.722125] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 806102ec-7622-4770-91c9-8c5723893dec/806102ec-7622-4770-91c9-8c5723893dec.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1706.722343] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1706.722594] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-964233b4-08ac-4e78-b234-872a642b9cfd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.729878] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Waiting for the task: (returnval){ [ 1706.729878] env[62508]: value = "task-1776430" [ 1706.729878] env[62508]: _type = "Task" [ 1706.729878] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.738936] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': task-1776430, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.879094] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b2bb5ad7-d8d6-4e52-bd54-d5b4eaa88cb4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "24091abb-f71f-4528-8fc5-b97725cf079e" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.589s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1706.982587] env[62508]: DEBUG nova.scheduler.client.report [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 134 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1706.982967] env[62508]: DEBUG nova.compute.provider_tree [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 134 to 135 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1706.983265] env[62508]: DEBUG nova.compute.provider_tree [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1707.074792] env[62508]: DEBUG oslo_vmware.api [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776429, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14868} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.075102] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1707.075304] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1707.075486] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1707.075665] env[62508]: INFO nova.compute.manager [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1707.075895] env[62508]: DEBUG oslo.service.loopingcall [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1707.076113] env[62508]: DEBUG nova.compute.manager [-] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1707.076209] env[62508]: DEBUG nova.network.neutron [-] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1707.095993] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c51f0bd3-8730-44e2-96fd-f644be7bddf7 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1707.096350] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-92337ade-f5f1-4104-a9dd-82d05f3fd06a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.105629] env[62508]: DEBUG oslo_vmware.api [None req-c51f0bd3-8730-44e2-96fd-f644be7bddf7 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1707.105629] env[62508]: value = "task-1776431" [ 1707.105629] env[62508]: _type = "Task" [ 1707.105629] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.114233] env[62508]: DEBUG oslo_vmware.api [None req-c51f0bd3-8730-44e2-96fd-f644be7bddf7 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776431, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.140708] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1707.140708] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0015b0ea-98b0-47c5-bc04-011c436f991d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.143458] env[62508]: WARNING nova.network.neutron [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] 8e80b270-1a6d-45fe-9a8b-355e686378f1 already exists in list: networks containing: ['8e80b270-1a6d-45fe-9a8b-355e686378f1']. ignoring it [ 1707.151106] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1707.151106] env[62508]: value = "task-1776432" [ 1707.151106] env[62508]: _type = "Task" [ 1707.151106] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.164511] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776432, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.239988] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': task-1776430, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.215511} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.240389] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1707.241363] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e448b4-e9fa-4b5d-9c60-650f6ca5e738 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.276657] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 806102ec-7622-4770-91c9-8c5723893dec/806102ec-7622-4770-91c9-8c5723893dec.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1707.278555] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b1b9c01-bf39-4efb-bb19-af179308a8e8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.307730] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Waiting for the task: (returnval){ [ 1707.307730] env[62508]: value = "task-1776433" [ 1707.307730] env[62508]: _type = "Task" [ 1707.307730] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.317774] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': task-1776433, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.459875] env[62508]: DEBUG nova.compute.manager [req-f0867e31-8e18-4bc8-9647-7fcd76d4d899 req-394fdecd-7d0b-4d88-a66c-8deaa262c729 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Received event network-vif-plugged-09990164-6bb3-4ed4-ba3a-f67204a82380 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1707.460124] env[62508]: DEBUG oslo_concurrency.lockutils [req-f0867e31-8e18-4bc8-9647-7fcd76d4d899 req-394fdecd-7d0b-4d88-a66c-8deaa262c729 service nova] Acquiring lock "a058273e-9c68-4d73-9149-ceb60c1c1cda-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.460922] env[62508]: DEBUG oslo_concurrency.lockutils [req-f0867e31-8e18-4bc8-9647-7fcd76d4d899 req-394fdecd-7d0b-4d88-a66c-8deaa262c729 service nova] Lock "a058273e-9c68-4d73-9149-ceb60c1c1cda-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.465020] env[62508]: DEBUG oslo_concurrency.lockutils [req-f0867e31-8e18-4bc8-9647-7fcd76d4d899 req-394fdecd-7d0b-4d88-a66c-8deaa262c729 service nova] Lock "a058273e-9c68-4d73-9149-ceb60c1c1cda-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.465020] env[62508]: DEBUG nova.compute.manager [req-f0867e31-8e18-4bc8-9647-7fcd76d4d899 req-394fdecd-7d0b-4d88-a66c-8deaa262c729 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] No waiting events found dispatching network-vif-plugged-09990164-6bb3-4ed4-ba3a-f67204a82380 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1707.465020] env[62508]: WARNING nova.compute.manager [req-f0867e31-8e18-4bc8-9647-7fcd76d4d899 req-394fdecd-7d0b-4d88-a66c-8deaa262c729 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Received unexpected event network-vif-plugged-09990164-6bb3-4ed4-ba3a-f67204a82380 for instance with vm_state active and task_state None. [ 1707.465020] env[62508]: DEBUG nova.compute.manager [req-f0867e31-8e18-4bc8-9647-7fcd76d4d899 req-394fdecd-7d0b-4d88-a66c-8deaa262c729 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Received event network-changed-09990164-6bb3-4ed4-ba3a-f67204a82380 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1707.465020] env[62508]: DEBUG nova.compute.manager [req-f0867e31-8e18-4bc8-9647-7fcd76d4d899 req-394fdecd-7d0b-4d88-a66c-8deaa262c729 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Refreshing instance network info cache due to event network-changed-09990164-6bb3-4ed4-ba3a-f67204a82380. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1707.465020] env[62508]: DEBUG oslo_concurrency.lockutils [req-f0867e31-8e18-4bc8-9647-7fcd76d4d899 req-394fdecd-7d0b-4d88-a66c-8deaa262c729 service nova] Acquiring lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1707.490505] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.902s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.493314] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.830s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.493492] env[62508]: DEBUG nova.objects.instance [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Lazy-loading 'resources' on Instance uuid a0245a18-638d-4c32-bea2-456408b5e001 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1707.523351] env[62508]: INFO nova.scheduler.client.report [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Deleted allocations for instance 73841c12-1ae9-46a5-bfe0-e0f82877667c [ 1707.619390] env[62508]: DEBUG oslo_vmware.api [None req-c51f0bd3-8730-44e2-96fd-f644be7bddf7 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776431, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.667393] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776432, 'name': PowerOffVM_Task, 'duration_secs': 0.222899} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.667731] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1707.668577] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b407906-e836-43f5-b9fe-bd2ced9e814a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.673875] env[62508]: DEBUG nova.compute.manager [req-653ded0d-f0d5-4656-8a3c-d775cce97948 req-ae128a6f-f0a2-4821-b77a-974b78c03f0c service nova] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Received event network-vif-deleted-47aa7e28-8aa9-462b-a2c6-2d530b3f5ec8 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1707.674326] env[62508]: INFO nova.compute.manager [req-653ded0d-f0d5-4656-8a3c-d775cce97948 req-ae128a6f-f0a2-4821-b77a-974b78c03f0c service nova] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Neutron deleted interface 47aa7e28-8aa9-462b-a2c6-2d530b3f5ec8; detaching it from the instance and deleting it from the info cache [ 1707.674523] env[62508]: DEBUG nova.network.neutron [req-653ded0d-f0d5-4656-8a3c-d775cce97948 req-ae128a6f-f0a2-4821-b77a-974b78c03f0c service nova] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1707.697070] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a020117f-1e41-4b13-8264-d221caa89d9a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.822032] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': task-1776433, 'name': ReconfigVM_Task, 'duration_secs': 0.320078} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.822032] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 806102ec-7622-4770-91c9-8c5723893dec/806102ec-7622-4770-91c9-8c5723893dec.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1707.822032] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-128a8b3d-0bc4-498c-aed7-6dd08dd3f037 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.830060] env[62508]: DEBUG nova.network.neutron [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Updating instance_info_cache with network_info: [{"id": "be5b5d9b-1f22-455a-b4f6-128f17030129", "address": "fa:16:3e:8e:37:9c", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe5b5d9b-1f", "ovs_interfaceid": "be5b5d9b-1f22-455a-b4f6-128f17030129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "09990164-6bb3-4ed4-ba3a-f67204a82380", "address": "fa:16:3e:77:b0:c9", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09990164-6b", "ovs_interfaceid": "09990164-6bb3-4ed4-ba3a-f67204a82380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1707.836475] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Waiting for the task: (returnval){ [ 1707.836475] env[62508]: value = "task-1776434" [ 1707.836475] env[62508]: _type = "Task" [ 1707.836475] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.847151] env[62508]: DEBUG nova.network.neutron [-] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1707.848455] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': task-1776434, 'name': Rename_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.037999] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b311e8a0-1497-4c10-9499-81863f1200d2 tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "73841c12-1ae9-46a5-bfe0-e0f82877667c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.057s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1708.123328] env[62508]: DEBUG oslo_vmware.api [None req-c51f0bd3-8730-44e2-96fd-f644be7bddf7 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776431, 'name': PowerOnVM_Task, 'duration_secs': 0.536123} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.123328] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c51f0bd3-8730-44e2-96fd-f644be7bddf7 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1708.123328] env[62508]: DEBUG nova.compute.manager [None req-c51f0bd3-8730-44e2-96fd-f644be7bddf7 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1708.123328] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7293dc5c-1df9-43ea-ba47-b1ca0f2dcf6e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.180455] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-00c5fe06-e14b-4669-af6d-14a741cda704 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.194308] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e2cd18-921c-4f23-887d-61f63eeb0e0d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.215043] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Creating Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1708.215043] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c3f4430c-a205-4db8-9a59-bbcb3d5c7f06 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.222696] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1708.222696] env[62508]: value = "task-1776435" [ 1708.222696] env[62508]: _type = "Task" [ 1708.222696] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.240095] env[62508]: DEBUG nova.compute.manager [req-653ded0d-f0d5-4656-8a3c-d775cce97948 req-ae128a6f-f0a2-4821-b77a-974b78c03f0c service nova] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Detach interface failed, port_id=47aa7e28-8aa9-462b-a2c6-2d530b3f5ec8, reason: Instance 9283494f-d8e2-4077-9e4d-57ee4786c3c7 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1708.253718] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776435, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.339066] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1708.339319] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1708.339319] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1708.339696] env[62508]: DEBUG oslo_concurrency.lockutils [req-f0867e31-8e18-4bc8-9647-7fcd76d4d899 req-394fdecd-7d0b-4d88-a66c-8deaa262c729 service nova] Acquired lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1708.339912] env[62508]: DEBUG nova.network.neutron [req-f0867e31-8e18-4bc8-9647-7fcd76d4d899 req-394fdecd-7d0b-4d88-a66c-8deaa262c729 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Refreshing network info cache for port 09990164-6bb3-4ed4-ba3a-f67204a82380 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1708.342339] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aea2fd9-4ccd-4adc-a48e-5feb6645b262 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.352839] env[62508]: INFO nova.compute.manager [-] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Took 1.28 seconds to deallocate network for instance. 
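
The recurring "Waiting for the task: ... progress is N% ... completed successfully" records in this stretch of the log (task-1776426 through task-1776435) all come from the same polling pattern: a vCenter task reference is polled on an interval, its progress is reported while it runs, and the total duration_secs is logged once it succeeds. The sketch below is a minimal, hypothetical illustration of that loop in plain Python, not the actual oslo.vmware implementation; the FakeTask class is a made-up stand-in for the vCenter TaskInfo object so the example runs on its own.

    import time

    class FakeTask:
        """Hypothetical stand-in for a vCenter task that finishes after a few polls."""
        def __init__(self, name):
            self.name = name
            self._polls = 0

        def info(self):
            # Simulate a task that reaches 100% on the second poll.
            self._polls += 1
            progress = min(self._polls * 50, 100)
            state = "success" if progress >= 100 else "running"
            return {"state": state, "progress": progress}

    def wait_for_task(task, poll_interval=0.5):
        """Poll `task` until it completes; return the elapsed time in seconds."""
        start = time.monotonic()
        while True:
            info = task.info()
            if info["state"] == "success":
                duration = time.monotonic() - start
                print(f"Task {task.name} completed successfully "
                      f"(duration_secs={duration:.3f})")
                return duration
            print(f"Task {task.name} progress is {info['progress']}%")
            time.sleep(poll_interval)

    if __name__ == "__main__":
        wait_for_task(FakeTask("task-1776435"))

Under these assumptions the output mirrors the shape of the log lines above (a progress record followed by a completion record with a duration), which is why many of the task IDs in this section appear twice: once when the wait begins and once when polling observes completion.
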
[ 1708.353675] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c00127-6b68-4657-b4bb-fa7703f3a8df {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.359054] env[62508]: DEBUG nova.objects.instance [None req-31aec693-53f7-4b9d-8ba5-58135dad1d75 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lazy-loading 'flavor' on Instance uuid 24091abb-f71f-4528-8fc5-b97725cf079e {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1708.377694] env[62508]: DEBUG nova.virt.hardware [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1708.377928] env[62508]: DEBUG nova.virt.hardware [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1708.378104] env[62508]: DEBUG nova.virt.hardware [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1708.378328] env[62508]: DEBUG nova.virt.hardware [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1708.378494] env[62508]: DEBUG nova.virt.hardware [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1708.378769] env[62508]: DEBUG nova.virt.hardware [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1708.378856] env[62508]: DEBUG nova.virt.hardware [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1708.378999] 
env[62508]: DEBUG nova.virt.hardware [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1708.379185] env[62508]: DEBUG nova.virt.hardware [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1708.379349] env[62508]: DEBUG nova.virt.hardware [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1708.379527] env[62508]: DEBUG nova.virt.hardware [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1708.388430] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Reconfiguring VM to attach interface {{(pid=62508) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1708.395624] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e67ddd5-e159-4cd5-bb2c-9fa7eec24f91 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.409633] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': task-1776434, 'name': Rename_Task, 'duration_secs': 0.161247} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.414255] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6125ada-25e2-4451-be64-c51881484108 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.415891] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1708.417479] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57951b4e-d211-41e6-b3ba-c8926ffb7732 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.422924] env[62508]: DEBUG oslo_vmware.api [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1708.422924] env[62508]: value = "task-1776437" [ 1708.422924] env[62508]: _type = "Task" [ 1708.422924] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.474246] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6575d84-5fab-4375-823c-9aa21503250a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.478808] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Waiting for the task: (returnval){ [ 1708.478808] env[62508]: value = "task-1776438" [ 1708.478808] env[62508]: _type = "Task" [ 1708.478808] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.494018] env[62508]: DEBUG oslo_vmware.api [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776437, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.494881] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3923d1a6-3937-49e6-8d77-096f9474d4a8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.506753] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': task-1776438, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.519515] env[62508]: DEBUG nova.compute.provider_tree [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1708.737182] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776435, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.893118] env[62508]: DEBUG oslo_concurrency.lockutils [None req-31aec693-53f7-4b9d-8ba5-58135dad1d75 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "refresh_cache-24091abb-f71f-4528-8fc5-b97725cf079e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1708.893303] env[62508]: DEBUG oslo_concurrency.lockutils [None req-31aec693-53f7-4b9d-8ba5-58135dad1d75 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquired lock "refresh_cache-24091abb-f71f-4528-8fc5-b97725cf079e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1708.893491] env[62508]: DEBUG nova.network.neutron [None req-31aec693-53f7-4b9d-8ba5-58135dad1d75 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1708.893695] env[62508]: DEBUG nova.objects.instance [None req-31aec693-53f7-4b9d-8ba5-58135dad1d75 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lazy-loading 'info_cache' on Instance uuid 24091abb-f71f-4528-8fc5-b97725cf079e {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1708.913110] env[62508]: DEBUG oslo_concurrency.lockutils [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1708.975940] env[62508]: DEBUG oslo_vmware.api [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776437, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.990918] env[62508]: DEBUG oslo_vmware.api [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': task-1776438, 'name': PowerOnVM_Task, 'duration_secs': 0.550484} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.991204] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1708.991411] env[62508]: INFO nova.compute.manager [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Took 8.76 seconds to spawn the instance on the hypervisor. [ 1708.991730] env[62508]: DEBUG nova.compute.manager [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1708.992898] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b161ac-f879-445b-989a-e04a1f9dda98 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.022545] env[62508]: DEBUG nova.scheduler.client.report [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1709.125660] env[62508]: DEBUG nova.network.neutron [req-f0867e31-8e18-4bc8-9647-7fcd76d4d899 req-394fdecd-7d0b-4d88-a66c-8deaa262c729 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Updated VIF entry in instance network info cache for port 09990164-6bb3-4ed4-ba3a-f67204a82380. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1709.126215] env[62508]: DEBUG nova.network.neutron [req-f0867e31-8e18-4bc8-9647-7fcd76d4d899 req-394fdecd-7d0b-4d88-a66c-8deaa262c729 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Updating instance_info_cache with network_info: [{"id": "be5b5d9b-1f22-455a-b4f6-128f17030129", "address": "fa:16:3e:8e:37:9c", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe5b5d9b-1f", "ovs_interfaceid": "be5b5d9b-1f22-455a-b4f6-128f17030129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "09990164-6bb3-4ed4-ba3a-f67204a82380", "address": "fa:16:3e:77:b0:c9", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09990164-6b", "ovs_interfaceid": "09990164-6bb3-4ed4-ba3a-f67204a82380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1709.235150] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776435, 'name': CreateSnapshot_Task, 'duration_secs': 0.572938} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.235434] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Created Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1709.236350] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff48cefd-04d2-472d-a3aa-366b753edff1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.397518] env[62508]: DEBUG nova.objects.base [None req-31aec693-53f7-4b9d-8ba5-58135dad1d75 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Object Instance<24091abb-f71f-4528-8fc5-b97725cf079e> lazy-loaded attributes: flavor,info_cache {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1709.476848] env[62508]: DEBUG oslo_vmware.api [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776437, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.510834] env[62508]: INFO nova.compute.manager [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Took 19.50 seconds to build instance. [ 1709.527397] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.034s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.529721] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 11.530s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.529909] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.530101] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1709.530456] env[62508]: DEBUG oslo_concurrency.lockutils [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.326s 
{{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.531855] env[62508]: INFO nova.compute.claims [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1709.535841] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6394e4de-5752-4ab7-a7bc-26c8763e299a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.545610] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059f40de-4e5e-4e6a-8031-d2adc2d95b87 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.560897] env[62508]: INFO nova.scheduler.client.report [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Deleted allocations for instance a0245a18-638d-4c32-bea2-456408b5e001 [ 1709.562922] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d117efb-81d5-4a88-b85b-fd9c37b00708 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.572746] env[62508]: DEBUG nova.compute.manager [req-d14550d5-4f41-4639-a007-ca42fc4f8c9f req-4989e612-f497-47e7-aca5-e168c354c380 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Received event network-changed-458dc468-1ae9-4f09-b0e2-4c866362fb80 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1709.572946] env[62508]: DEBUG nova.compute.manager [req-d14550d5-4f41-4639-a007-ca42fc4f8c9f req-4989e612-f497-47e7-aca5-e168c354c380 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Refreshing instance network info cache due to event network-changed-458dc468-1ae9-4f09-b0e2-4c866362fb80. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1709.573297] env[62508]: DEBUG oslo_concurrency.lockutils [req-d14550d5-4f41-4639-a007-ca42fc4f8c9f req-4989e612-f497-47e7-aca5-e168c354c380 service nova] Acquiring lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1709.573451] env[62508]: DEBUG oslo_concurrency.lockutils [req-d14550d5-4f41-4639-a007-ca42fc4f8c9f req-4989e612-f497-47e7-aca5-e168c354c380 service nova] Acquired lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1709.573617] env[62508]: DEBUG nova.network.neutron [req-d14550d5-4f41-4639-a007-ca42fc4f8c9f req-4989e612-f497-47e7-aca5-e168c354c380 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Refreshing network info cache for port 458dc468-1ae9-4f09-b0e2-4c866362fb80 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1709.582697] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa31f403-a1a2-45ec-85d6-f863d5073274 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.616661] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178586MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1709.616827] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.629429] env[62508]: DEBUG oslo_concurrency.lockutils [req-f0867e31-8e18-4bc8-9647-7fcd76d4d899 req-394fdecd-7d0b-4d88-a66c-8deaa262c729 service nova] Releasing lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1709.757948] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Creating linked-clone VM from snapshot {{(pid=62508) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1709.757948] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-43d0a03c-19a5-4285-b9d4-b5bb008923a1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.768912] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1709.768912] env[62508]: value = "task-1776439" [ 1709.768912] env[62508]: _type = "Task" [ 1709.768912] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.777347] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776439, 'name': CloneVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.850331] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.850689] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.850935] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.851205] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.851387] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.855935] env[62508]: INFO nova.compute.manager [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Terminating instance [ 1709.857906] env[62508]: DEBUG nova.compute.manager [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1709.858113] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1709.859096] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94bff2ae-c684-4dad-b414-cdae762b4de5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.867720] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1709.867987] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e78e956-d499-4e47-823a-4b1b6aa2f12c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.875350] env[62508]: DEBUG oslo_vmware.api [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1709.875350] env[62508]: value = "task-1776440" [ 1709.875350] env[62508]: _type = "Task" [ 1709.875350] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.886105] env[62508]: DEBUG oslo_vmware.api [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776440, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.978753] env[62508]: DEBUG oslo_vmware.api [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776437, 'name': ReconfigVM_Task, 'duration_secs': 1.060025} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.979613] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1709.979771] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Reconfigured VM to attach interface {{(pid=62508) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1710.013224] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6010a6be-8048-4737-9bd7-5e3cc23ff7b1 tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Lock "806102ec-7622-4770-91c9-8c5723893dec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.010s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1710.083982] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5bc24d35-865f-4e35-b69b-2bc439d02fde tempest-ServersTestManualDisk-316539111 tempest-ServersTestManualDisk-316539111-project-member] Lock "a0245a18-638d-4c32-bea2-456408b5e001" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.684s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1710.285451] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776439, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.289227] env[62508]: DEBUG nova.network.neutron [None req-31aec693-53f7-4b9d-8ba5-58135dad1d75 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Updating instance_info_cache with network_info: [{"id": "0eda6157-2402-4297-8eb5-07a5b94eba56", "address": "fa:16:3e:b7:4c:0b", "network": {"id": "121c7907-9028-4be7-9d23-48e5c34ec429", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-954413717-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e682eb7bbd240afb2f6581c7478b99c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eda6157-24", "ovs_interfaceid": "0eda6157-2402-4297-8eb5-07a5b94eba56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1710.387244] env[62508]: DEBUG oslo_vmware.api [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776440, 'name': PowerOffVM_Task, 'duration_secs': 0.25122} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.390231] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1710.390406] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1710.390850] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1643c2fe-4fe0-4c1d-a1a5-32b6559b9730 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.485212] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6f3f5749-73c3-4a32-abf1-35633d9f3d00 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "interface-a058273e-9c68-4d73-9149-ceb60c1c1cda-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.629s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1710.597876] env[62508]: DEBUG nova.network.neutron [req-d14550d5-4f41-4639-a007-ca42fc4f8c9f req-4989e612-f497-47e7-aca5-e168c354c380 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Updated VIF entry in instance network info cache for port 458dc468-1ae9-4f09-b0e2-4c866362fb80. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1710.598271] env[62508]: DEBUG nova.network.neutron [req-d14550d5-4f41-4639-a007-ca42fc4f8c9f req-4989e612-f497-47e7-aca5-e168c354c380 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Updating instance_info_cache with network_info: [{"id": "458dc468-1ae9-4f09-b0e2-4c866362fb80", "address": "fa:16:3e:f7:69:71", "network": {"id": "1469693f-972e-4bc4-8302-f159fa7e79b8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1766475744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27e6f55b56be40d2a619f0119aefb2ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap458dc468-1a", "ovs_interfaceid": "458dc468-1ae9-4f09-b0e2-4c866362fb80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1710.783393] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776439, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.793218] env[62508]: DEBUG oslo_concurrency.lockutils [None req-31aec693-53f7-4b9d-8ba5-58135dad1d75 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Releasing lock "refresh_cache-24091abb-f71f-4528-8fc5-b97725cf079e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1710.834257] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3cdd2a-e477-4620-88e2-4b1c5bdd1e96 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.842249] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236bb0b5-05a6-4154-a523-3cf92c2a5307 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.877173] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b1157d-ec95-4fcb-bcdd-2c2123aa4512 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.881055] env[62508]: DEBUG nova.compute.manager [req-abeef8ea-61d5-4986-a847-6fac68ba1d65 req-81317faf-873e-44d1-b511-160af0749ea6 service nova] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Received event network-changed-5c6d20a1-1c14-4874-b295-9828a9172d8d {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1710.881421] env[62508]: DEBUG nova.compute.manager [req-abeef8ea-61d5-4986-a847-6fac68ba1d65 req-81317faf-873e-44d1-b511-160af0749ea6 service nova] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Refreshing instance network info cache due to event network-changed-5c6d20a1-1c14-4874-b295-9828a9172d8d. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1710.881739] env[62508]: DEBUG oslo_concurrency.lockutils [req-abeef8ea-61d5-4986-a847-6fac68ba1d65 req-81317faf-873e-44d1-b511-160af0749ea6 service nova] Acquiring lock "refresh_cache-806102ec-7622-4770-91c9-8c5723893dec" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1710.881996] env[62508]: DEBUG oslo_concurrency.lockutils [req-abeef8ea-61d5-4986-a847-6fac68ba1d65 req-81317faf-873e-44d1-b511-160af0749ea6 service nova] Acquired lock "refresh_cache-806102ec-7622-4770-91c9-8c5723893dec" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1710.882270] env[62508]: DEBUG nova.network.neutron [req-abeef8ea-61d5-4986-a847-6fac68ba1d65 req-81317faf-873e-44d1-b511-160af0749ea6 service nova] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Refreshing network info cache for port 5c6d20a1-1c14-4874-b295-9828a9172d8d {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1710.894668] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34bfe2c4-f5db-4f73-98bd-92c6b7bebd58 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.907092] env[62508]: DEBUG nova.compute.provider_tree [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1711.101999] env[62508]: DEBUG oslo_concurrency.lockutils [req-d14550d5-4f41-4639-a007-ca42fc4f8c9f req-4989e612-f497-47e7-aca5-e168c354c380 service nova] Releasing lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1711.102341] env[62508]: DEBUG nova.compute.manager [req-d14550d5-4f41-4639-a007-ca42fc4f8c9f req-4989e612-f497-47e7-aca5-e168c354c380 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Received event network-changed-458dc468-1ae9-4f09-b0e2-4c866362fb80 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1711.102519] env[62508]: DEBUG nova.compute.manager [req-d14550d5-4f41-4639-a007-ca42fc4f8c9f req-4989e612-f497-47e7-aca5-e168c354c380 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Refreshing instance network info cache due to event network-changed-458dc468-1ae9-4f09-b0e2-4c866362fb80. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1711.102734] env[62508]: DEBUG oslo_concurrency.lockutils [req-d14550d5-4f41-4639-a007-ca42fc4f8c9f req-4989e612-f497-47e7-aca5-e168c354c380 service nova] Acquiring lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1711.102948] env[62508]: DEBUG oslo_concurrency.lockutils [req-d14550d5-4f41-4639-a007-ca42fc4f8c9f req-4989e612-f497-47e7-aca5-e168c354c380 service nova] Acquired lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1711.103142] env[62508]: DEBUG nova.network.neutron [req-d14550d5-4f41-4639-a007-ca42fc4f8c9f req-4989e612-f497-47e7-aca5-e168c354c380 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Refreshing network info cache for port 458dc468-1ae9-4f09-b0e2-4c866362fb80 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1711.285037] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776439, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.299017] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-31aec693-53f7-4b9d-8ba5-58135dad1d75 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1711.299017] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e13cf2bc-dead-43f9-9180-f477210fc907 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.307381] env[62508]: DEBUG oslo_vmware.api [None req-31aec693-53f7-4b9d-8ba5-58135dad1d75 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1711.307381] env[62508]: value = "task-1776442" [ 1711.307381] env[62508]: _type = "Task" [ 1711.307381] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1711.320334] env[62508]: DEBUG oslo_vmware.api [None req-31aec693-53f7-4b9d-8ba5-58135dad1d75 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776442, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.412027] env[62508]: DEBUG nova.scheduler.client.report [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1711.787730] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776439, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.819766] env[62508]: DEBUG oslo_vmware.api [None req-31aec693-53f7-4b9d-8ba5-58135dad1d75 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776442, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.921442] env[62508]: DEBUG oslo_concurrency.lockutils [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.391s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1711.922734] env[62508]: DEBUG nova.compute.manager [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1711.924759] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.140s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1711.924977] env[62508]: DEBUG nova.objects.instance [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lazy-loading 'resources' on Instance uuid 4d24bacc-48c4-4649-bb29-fcae2cf77782 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1711.962426] env[62508]: DEBUG nova.network.neutron [req-d14550d5-4f41-4639-a007-ca42fc4f8c9f req-4989e612-f497-47e7-aca5-e168c354c380 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Updated VIF entry in instance network info cache for port 458dc468-1ae9-4f09-b0e2-4c866362fb80. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1711.962896] env[62508]: DEBUG nova.network.neutron [req-d14550d5-4f41-4639-a007-ca42fc4f8c9f req-4989e612-f497-47e7-aca5-e168c354c380 service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Updating instance_info_cache with network_info: [{"id": "458dc468-1ae9-4f09-b0e2-4c866362fb80", "address": "fa:16:3e:f7:69:71", "network": {"id": "1469693f-972e-4bc4-8302-f159fa7e79b8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1766475744-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27e6f55b56be40d2a619f0119aefb2ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap458dc468-1a", "ovs_interfaceid": "458dc468-1ae9-4f09-b0e2-4c866362fb80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1712.082770] env[62508]: DEBUG nova.network.neutron [req-abeef8ea-61d5-4986-a847-6fac68ba1d65 req-81317faf-873e-44d1-b511-160af0749ea6 service nova] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Updated VIF entry in instance network info cache for port 5c6d20a1-1c14-4874-b295-9828a9172d8d. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1712.083164] env[62508]: DEBUG nova.network.neutron [req-abeef8ea-61d5-4986-a847-6fac68ba1d65 req-81317faf-873e-44d1-b511-160af0749ea6 service nova] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Updating instance_info_cache with network_info: [{"id": "5c6d20a1-1c14-4874-b295-9828a9172d8d", "address": "fa:16:3e:33:a8:e4", "network": {"id": "472bd29e-33d3-4fb0-915b-13269b954453", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1601635694-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c8a9a71f05d4991a6dfaa8aed156e84", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cde23701-02ca-4cb4-b5a6-d321f8ac9660", "external-id": "nsx-vlan-transportzone-586", "segmentation_id": 586, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c6d20a1-1c", "ovs_interfaceid": "5c6d20a1-1c14-4874-b295-9828a9172d8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1712.232373] env[62508]: DEBUG oslo_concurrency.lockutils [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquiring lock "deee2c81-4d2c-47d3-aae6-ef829d59c644" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.232659] env[62508]: DEBUG oslo_concurrency.lockutils [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "deee2c81-4d2c-47d3-aae6-ef829d59c644" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.232871] env[62508]: DEBUG oslo_concurrency.lockutils [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquiring lock "deee2c81-4d2c-47d3-aae6-ef829d59c644-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.233119] env[62508]: DEBUG oslo_concurrency.lockutils [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "deee2c81-4d2c-47d3-aae6-ef829d59c644-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1712.233309] env[62508]: DEBUG oslo_concurrency.lockutils [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "deee2c81-4d2c-47d3-aae6-ef829d59c644-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.237754] env[62508]: INFO nova.compute.manager [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Terminating instance [ 1712.244601] env[62508]: DEBUG nova.compute.manager [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1712.244874] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1712.245941] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e274d92a-7564-4ab9-b60c-da48e33090a5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.256750] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1712.257027] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c733a35-9d96-4fb8-a1fb-09bf5500d246 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.265120] env[62508]: DEBUG oslo_vmware.api [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1712.265120] env[62508]: value = "task-1776443" [ 1712.265120] env[62508]: _type = "Task" [ 1712.265120] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.275787] env[62508]: DEBUG oslo_vmware.api [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776443, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.286280] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776439, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.320673] env[62508]: DEBUG oslo_vmware.api [None req-31aec693-53f7-4b9d-8ba5-58135dad1d75 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776442, 'name': PowerOnVM_Task, 'duration_secs': 0.591125} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.320673] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-31aec693-53f7-4b9d-8ba5-58135dad1d75 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1712.320673] env[62508]: DEBUG nova.compute.manager [None req-31aec693-53f7-4b9d-8ba5-58135dad1d75 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1712.320804] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ed9ac4-b8d9-4853-9235-52d413e357a6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.427925] env[62508]: DEBUG nova.compute.utils [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1712.429776] env[62508]: DEBUG nova.compute.manager [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1712.429776] env[62508]: DEBUG nova.network.neutron [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1712.470092] env[62508]: DEBUG oslo_concurrency.lockutils [req-d14550d5-4f41-4639-a007-ca42fc4f8c9f req-4989e612-f497-47e7-aca5-e168c354c380 service nova] Releasing lock "refresh_cache-aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1712.478543] env[62508]: DEBUG nova.policy [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '466fd3a805b24749b134fe7977a5ac86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e90ec7156574be597a12f4fa0e8c1dc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1712.586337] env[62508]: DEBUG oslo_concurrency.lockutils [req-abeef8ea-61d5-4986-a847-6fac68ba1d65 req-81317faf-873e-44d1-b511-160af0749ea6 service nova] Releasing lock "refresh_cache-806102ec-7622-4770-91c9-8c5723893dec" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1712.753129] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b0bd27-68d1-4cab-9ba6-9bfc62f4acfd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.762307] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b149583-2852-40b2-b325-01aacd118a6d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.775635] env[62508]: DEBUG oslo_vmware.api [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776443, 'name': PowerOffVM_Task, 'duration_secs': 0.422187} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.802111] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1712.802423] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1712.806218] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-45e557e8-0ce3-4cd6-871c-9790377e7cf6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.808071] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b5eb68-66d6-40e7-a7d9-f4483accf37e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.817050] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776439, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.820295] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c886cb-1778-4370-8903-183285432651 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.837751] env[62508]: DEBUG nova.compute.provider_tree [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1712.933129] env[62508]: DEBUG nova.compute.manager [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1712.940892] env[62508]: DEBUG nova.network.neutron [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Successfully created port: e4f1c3ea-4ff3-4929-8984-6e3d3cc11ff8 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1713.185759] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "interface-a058273e-9c68-4d73-9149-ceb60c1c1cda-594da101-91d1-4ce2-a09a-bd16f9fcc148" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.186067] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "interface-a058273e-9c68-4d73-9149-ceb60c1c1cda-594da101-91d1-4ce2-a09a-bd16f9fcc148" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.186953] env[62508]: DEBUG nova.objects.instance [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lazy-loading 'flavor' on Instance uuid a058273e-9c68-4d73-9149-ceb60c1c1cda {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1713.313656] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776439, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.341505] env[62508]: DEBUG nova.scheduler.client.report [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1713.782208] env[62508]: DEBUG nova.objects.instance [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lazy-loading 'pci_requests' on Instance uuid a058273e-9c68-4d73-9149-ceb60c1c1cda {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1713.812656] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776439, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.847394] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.923s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.850613] env[62508]: DEBUG oslo_concurrency.lockutils [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.320s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.851157] env[62508]: DEBUG nova.objects.instance [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lazy-loading 'resources' on Instance uuid 9bafe786-5815-4871-8405-558cac7b3654 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1713.870596] env[62508]: INFO nova.scheduler.client.report [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Deleted allocations for instance 4d24bacc-48c4-4649-bb29-fcae2cf77782 [ 1713.943485] env[62508]: DEBUG nova.compute.manager [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1713.971363] env[62508]: DEBUG nova.virt.hardware [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1713.971363] env[62508]: DEBUG nova.virt.hardware [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1713.971571] env[62508]: DEBUG nova.virt.hardware [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1713.971651] env[62508]: DEBUG nova.virt.hardware [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1713.971814] env[62508]: DEBUG nova.virt.hardware [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1713.971966] env[62508]: DEBUG nova.virt.hardware [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1713.972188] env[62508]: DEBUG nova.virt.hardware [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1713.972352] env[62508]: DEBUG nova.virt.hardware [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1713.972520] env[62508]: DEBUG nova.virt.hardware [None 
req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1713.972686] env[62508]: DEBUG nova.virt.hardware [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1713.972863] env[62508]: DEBUG nova.virt.hardware [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1713.973800] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35472eab-de56-470d-ab9e-b13aa4db326b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.982936] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f047431-6f05-4750-9c7c-7311ccac8a6b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.284502] env[62508]: DEBUG nova.objects.base [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1714.284820] env[62508]: DEBUG nova.network.neutron [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1714.313558] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776439, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.362800] env[62508]: DEBUG nova.policy [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c1793957cc840d58a1b6f1f9b38b96b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b46df14344794f29a8b0c00408d18159', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1714.380197] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5aa74e59-84af-4e04-a881-ab05cf3782eb tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "4d24bacc-48c4-4649-bb29-fcae2cf77782" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.034s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.605561] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f272df68-5e2c-4705-a15c-85492bbe8683 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.613948] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c209bb6-68e8-424e-b83f-0f14230b3de8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.646052] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b034425d-d76e-42b4-81f6-3af4a8736c57 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.654373] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56822aed-a94c-461a-bc55-59af27ebc5bb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.668561] env[62508]: DEBUG nova.compute.provider_tree [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1714.814059] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776439, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.174035] env[62508]: DEBUG nova.scheduler.client.report [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1715.316837] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776439, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.677184] env[62508]: DEBUG oslo_concurrency.lockutils [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.827s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.679639] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.135s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.679880] env[62508]: DEBUG nova.objects.instance [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lazy-loading 'resources' on Instance uuid 2a564423-a2fd-4873-885f-37777bdd83eb {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1715.695632] env[62508]: INFO nova.scheduler.client.report [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Deleted allocations for instance 9bafe786-5815-4871-8405-558cac7b3654 [ 1715.815872] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776439, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.874796] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1715.875049] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1715.875263] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Deleting the datastore file [datastore1] e478855d-e9c7-4abc-8e22-a4b2eb0c7310 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1715.875556] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39eaae06-6067-4176-837a-1ee42382bf09 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.884699] env[62508]: DEBUG oslo_vmware.api [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1715.884699] env[62508]: value = "task-1776445" [ 1715.884699] env[62508]: _type = "Task" [ 1715.884699] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.894332] env[62508]: DEBUG oslo_vmware.api [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776445, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.927030] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1715.927368] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1715.927613] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Deleting the datastore file [datastore1] deee2c81-4d2c-47d3-aae6-ef829d59c644 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1715.928156] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-60b78316-f1ff-499f-a5bc-5e04bb7d7a00 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.936721] env[62508]: DEBUG oslo_vmware.api [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for the task: (returnval){ [ 1715.936721] env[62508]: value = "task-1776446" [ 1715.936721] env[62508]: _type = "Task" [ 1715.936721] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.946519] env[62508]: DEBUG oslo_vmware.api [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776446, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.205308] env[62508]: DEBUG oslo_concurrency.lockutils [None req-88046dd8-854c-4390-97a3-3f6396bafcc6 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "9bafe786-5815-4871-8405-558cac7b3654" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.265s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.316194] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776439, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.396240] env[62508]: DEBUG oslo_vmware.api [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776445, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.204772} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.398976] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1716.399543] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1716.399543] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1716.399703] env[62508]: INFO nova.compute.manager [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Took 6.54 seconds to destroy the instance on the hypervisor. [ 1716.399965] env[62508]: DEBUG oslo.service.loopingcall [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1716.400415] env[62508]: DEBUG nova.compute.manager [-] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1716.400566] env[62508]: DEBUG nova.network.neutron [-] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1716.422324] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009d4325-d7c1-45f6-a21b-bb90b1f9d551 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.431376] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce9cb26-0f72-4b59-9803-040c5ceb8826 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.472124] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b07336-a578-4a12-b49d-36d22f15753f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.478310] env[62508]: DEBUG oslo_vmware.api [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Task: {'id': task-1776446, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.283328} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.478812] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1716.479093] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1716.479321] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1716.479504] env[62508]: INFO nova.compute.manager [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Took 4.23 seconds to destroy the instance on the hypervisor. [ 1716.479768] env[62508]: DEBUG oslo.service.loopingcall [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1716.479986] env[62508]: DEBUG nova.compute.manager [-] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1716.480105] env[62508]: DEBUG nova.network.neutron [-] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1716.485666] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad65ef3-8f57-4213-b5e3-2b138297be01 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.500779] env[62508]: DEBUG nova.compute.provider_tree [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1716.676311] env[62508]: DEBUG nova.compute.manager [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Stashing vm_state: active {{(pid=62508) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1716.818168] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776439, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.897962] env[62508]: DEBUG nova.compute.manager [req-3993ce53-0488-4686-86a5-48121dedddd3 req-6e2b198c-b4f1-4fe9-81b7-46f52b64f99c service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Received event network-vif-plugged-594da101-91d1-4ce2-a09a-bd16f9fcc148 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1716.898187] env[62508]: DEBUG oslo_concurrency.lockutils [req-3993ce53-0488-4686-86a5-48121dedddd3 req-6e2b198c-b4f1-4fe9-81b7-46f52b64f99c service nova] Acquiring lock "a058273e-9c68-4d73-9149-ceb60c1c1cda-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.898413] env[62508]: DEBUG oslo_concurrency.lockutils [req-3993ce53-0488-4686-86a5-48121dedddd3 req-6e2b198c-b4f1-4fe9-81b7-46f52b64f99c service nova] Lock "a058273e-9c68-4d73-9149-ceb60c1c1cda-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.898621] env[62508]: DEBUG oslo_concurrency.lockutils [req-3993ce53-0488-4686-86a5-48121dedddd3 req-6e2b198c-b4f1-4fe9-81b7-46f52b64f99c service nova] Lock "a058273e-9c68-4d73-9149-ceb60c1c1cda-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.898854] env[62508]: DEBUG nova.compute.manager [req-3993ce53-0488-4686-86a5-48121dedddd3 req-6e2b198c-b4f1-4fe9-81b7-46f52b64f99c service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] No waiting events found dispatching network-vif-plugged-594da101-91d1-4ce2-a09a-bd16f9fcc148 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1716.899875] env[62508]: WARNING nova.compute.manager [req-3993ce53-0488-4686-86a5-48121dedddd3 req-6e2b198c-b4f1-4fe9-81b7-46f52b64f99c service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Received unexpected event network-vif-plugged-594da101-91d1-4ce2-a09a-bd16f9fcc148 for instance with vm_state active and task_state None. 
[ 1717.006490] env[62508]: DEBUG nova.scheduler.client.report [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1717.069441] env[62508]: DEBUG nova.network.neutron [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Successfully updated port: 594da101-91d1-4ce2-a09a-bd16f9fcc148 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1717.136831] env[62508]: DEBUG nova.compute.manager [req-1ff94ff5-8721-4c6e-aaa7-9c6edf6b266d req-f4f31e72-d20b-4b27-93a3-82d04207feb9 service nova] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Received event network-vif-deleted-69b14f39-9f95-4e4c-a3dd-437cf82d8fa0 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1717.137260] env[62508]: INFO nova.compute.manager [req-1ff94ff5-8721-4c6e-aaa7-9c6edf6b266d req-f4f31e72-d20b-4b27-93a3-82d04207feb9 service nova] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Neutron deleted interface 69b14f39-9f95-4e4c-a3dd-437cf82d8fa0; detaching it from the instance and deleting it from the info cache [ 1717.137260] env[62508]: DEBUG nova.network.neutron [req-1ff94ff5-8721-4c6e-aaa7-9c6edf6b266d req-f4f31e72-d20b-4b27-93a3-82d04207feb9 service nova] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.196948] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1717.239008] env[62508]: DEBUG nova.network.neutron [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Successfully updated port: e4f1c3ea-4ff3-4929-8984-6e3d3cc11ff8 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1717.320652] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776439, 'name': CloneVM_Task, 'duration_secs': 7.493643} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.321087] env[62508]: INFO nova.virt.vmwareapi.vmops [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Created linked-clone VM from snapshot [ 1717.324685] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b71832-4992-4ac4-82ee-93e91081b7b1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.331263] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Uploading image cf41ffb1-ea55-4c93-bf70-33b17e44f550 {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1717.360152] env[62508]: DEBUG oslo_vmware.rw_handles [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1717.360152] env[62508]: value = "vm-368788" [ 1717.360152] env[62508]: _type = "VirtualMachine" [ 1717.360152] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1717.360443] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6dfec4f0-9235-4f99-8ed9-f362326ef41e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.368998] env[62508]: DEBUG oslo_vmware.rw_handles [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lease: (returnval){ [ 1717.368998] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521c2603-9f54-1399-0f81-6f6824f0e3c4" [ 1717.368998] env[62508]: _type = "HttpNfcLease" [ 1717.368998] env[62508]: } obtained for exporting VM: (result){ [ 1717.368998] env[62508]: value = "vm-368788" [ 1717.368998] env[62508]: _type = "VirtualMachine" [ 1717.368998] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1717.369338] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the lease: (returnval){ [ 1717.369338] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521c2603-9f54-1399-0f81-6f6824f0e3c4" [ 1717.369338] env[62508]: _type = "HttpNfcLease" [ 1717.369338] env[62508]: } to be ready. {{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1717.377110] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1717.377110] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521c2603-9f54-1399-0f81-6f6824f0e3c4" [ 1717.377110] env[62508]: _type = "HttpNfcLease" [ 1717.377110] env[62508]: } is initializing. 
{{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1717.508763] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.829s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1717.511068] env[62508]: DEBUG oslo_concurrency.lockutils [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.598s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1717.512020] env[62508]: DEBUG nova.objects.instance [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lazy-loading 'resources' on Instance uuid 9283494f-d8e2-4077-9e4d-57ee4786c3c7 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1717.531496] env[62508]: INFO nova.scheduler.client.report [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Deleted allocations for instance 2a564423-a2fd-4873-885f-37777bdd83eb [ 1717.535971] env[62508]: DEBUG nova.network.neutron [-] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.544551] env[62508]: DEBUG nova.network.neutron [-] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.571733] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.571931] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.572206] env[62508]: DEBUG nova.network.neutron [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1717.641743] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-63813dc6-705f-48c3-8b50-922ef143f601 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.657078] env[62508]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-458acbff-cb2e-48ea-b38a-275279723599 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.692980] env[62508]: DEBUG nova.compute.manager [req-1ff94ff5-8721-4c6e-aaa7-9c6edf6b266d req-f4f31e72-d20b-4b27-93a3-82d04207feb9 service nova] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Detach interface failed, port_id=69b14f39-9f95-4e4c-a3dd-437cf82d8fa0, reason: Instance deee2c81-4d2c-47d3-aae6-ef829d59c644 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1717.741599] env[62508]: DEBUG oslo_concurrency.lockutils [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "refresh_cache-f3e1c48d-9aaf-415f-8234-82a71bb469ee" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.741754] env[62508]: DEBUG oslo_concurrency.lockutils [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "refresh_cache-f3e1c48d-9aaf-415f-8234-82a71bb469ee" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.741914] env[62508]: DEBUG nova.network.neutron [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1717.878696] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1717.878696] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521c2603-9f54-1399-0f81-6f6824f0e3c4" [ 1717.878696] env[62508]: _type = "HttpNfcLease" [ 1717.878696] env[62508]: } is ready. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1717.879134] env[62508]: DEBUG oslo_vmware.rw_handles [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1717.879134] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521c2603-9f54-1399-0f81-6f6824f0e3c4" [ 1717.879134] env[62508]: _type = "HttpNfcLease" [ 1717.879134] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1717.879751] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-411004e5-a12c-48f2-aa33-dcc867697fb1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.888032] env[62508]: DEBUG oslo_vmware.rw_handles [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d40770-3178-1bcb-a263-eb1f5060c977/disk-0.vmdk from lease info. 
{{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1717.888103] env[62508]: DEBUG oslo_vmware.rw_handles [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d40770-3178-1bcb-a263-eb1f5060c977/disk-0.vmdk for reading. {{(pid=62508) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1717.996077] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7f770d27-9913-4f02-bbf2-106d1a1ce3c1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.041062] env[62508]: INFO nova.compute.manager [-] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Took 1.64 seconds to deallocate network for instance. [ 1718.042790] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c3db3aee-d41a-4388-a617-7cd72ccb6666 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "2a564423-a2fd-4873-885f-37777bdd83eb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1718.048194] env[62508]: INFO nova.compute.manager [-] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Took 1.57 seconds to deallocate network for instance. [ 1718.109785] env[62508]: WARNING nova.network.neutron [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] 8e80b270-1a6d-45fe-9a8b-355e686378f1 already exists in list: networks containing: ['8e80b270-1a6d-45fe-9a8b-355e686378f1']. ignoring it [ 1718.109982] env[62508]: WARNING nova.network.neutron [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] 8e80b270-1a6d-45fe-9a8b-355e686378f1 already exists in list: networks containing: ['8e80b270-1a6d-45fe-9a8b-355e686378f1']. ignoring it [ 1718.251799] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8f0aa5-1b35-4890-b40c-98a731b51c7d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.261695] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a7c827-b348-4dd3-a42a-efa98b90cde8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.300839] env[62508]: DEBUG nova.network.neutron [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1718.305378] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e72612-1a23-4d18-9013-8839f69b7686 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.314759] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-165d5869-63d3-4548-a355-ffb6103e9e0d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.332197] env[62508]: DEBUG nova.compute.provider_tree [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1718.495167] env[62508]: DEBUG nova.network.neutron [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Updating instance_info_cache with network_info: [{"id": "e4f1c3ea-4ff3-4929-8984-6e3d3cc11ff8", "address": "fa:16:3e:d6:c4:d5", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4f1c3ea-4f", "ovs_interfaceid": "e4f1c3ea-4ff3-4929-8984-6e3d3cc11ff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1718.550037] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.554297] env[62508]: DEBUG oslo_concurrency.lockutils [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.661551] env[62508]: DEBUG nova.network.neutron [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 
tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Updating instance_info_cache with network_info: [{"id": "be5b5d9b-1f22-455a-b4f6-128f17030129", "address": "fa:16:3e:8e:37:9c", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe5b5d9b-1f", "ovs_interfaceid": "be5b5d9b-1f22-455a-b4f6-128f17030129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "09990164-6bb3-4ed4-ba3a-f67204a82380", "address": "fa:16:3e:77:b0:c9", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09990164-6b", "ovs_interfaceid": "09990164-6bb3-4ed4-ba3a-f67204a82380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "594da101-91d1-4ce2-a09a-bd16f9fcc148", "address": "fa:16:3e:d1:cd:b5", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap594da101-91", "ovs_interfaceid": "594da101-91d1-4ce2-a09a-bd16f9fcc148", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1718.840209] env[62508]: DEBUG nova.scheduler.client.report [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1719.001881] env[62508]: DEBUG oslo_concurrency.lockutils [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "refresh_cache-f3e1c48d-9aaf-415f-8234-82a71bb469ee" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.003393] env[62508]: DEBUG nova.compute.manager [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Instance network_info: |[{"id": "e4f1c3ea-4ff3-4929-8984-6e3d3cc11ff8", "address": "fa:16:3e:d6:c4:d5", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4f1c3ea-4f", "ovs_interfaceid": "e4f1c3ea-4ff3-4929-8984-6e3d3cc11ff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1719.004076] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:c4:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e4f1c3ea-4ff3-4929-8984-6e3d3cc11ff8', 'vif_model': 
'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1719.012732] env[62508]: DEBUG oslo.service.loopingcall [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1719.013408] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1719.013524] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92a8dea4-72df-4fdc-808c-e04ac5fb55fe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.036740] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1719.036740] env[62508]: value = "task-1776448" [ 1719.036740] env[62508]: _type = "Task" [ 1719.036740] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.047969] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776448, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.164765] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.165860] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1719.166144] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.167897] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51862f64-d395-4ec4-a9b3-e772ddbdf507 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.188483] env[62508]: DEBUG nova.virt.hardware [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1719.188850] env[62508]: DEBUG nova.virt.hardware [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1719.189094] env[62508]: DEBUG nova.virt.hardware [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1719.189369] env[62508]: DEBUG nova.virt.hardware [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1719.189559] env[62508]: DEBUG nova.virt.hardware [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1719.189745] env[62508]: DEBUG nova.virt.hardware [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1719.190029] env[62508]: DEBUG nova.virt.hardware [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1719.190212] env[62508]: DEBUG nova.virt.hardware [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1719.190436] env[62508]: DEBUG nova.virt.hardware [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1719.190653] env[62508]: DEBUG nova.virt.hardware [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1719.190878] env[62508]: DEBUG nova.virt.hardware [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 
tempest-AttachInterfacesTestJSON-1910392284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1719.197572] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Reconfiguring VM to attach interface {{(pid=62508) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1719.198016] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11fcea10-3d73-445a-841b-ccb884dd813f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.218077] env[62508]: DEBUG oslo_vmware.api [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1719.218077] env[62508]: value = "task-1776449" [ 1719.218077] env[62508]: _type = "Task" [ 1719.218077] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.228133] env[62508]: DEBUG oslo_vmware.api [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776449, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.346608] env[62508]: DEBUG oslo_concurrency.lockutils [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.834s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1719.348347] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 9.731s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.366438] env[62508]: INFO nova.scheduler.client.report [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Deleted allocations for instance 9283494f-d8e2-4077-9e4d-57ee4786c3c7 [ 1719.376065] env[62508]: DEBUG nova.compute.manager [req-c954851c-c26f-49d2-b8ce-086610e335b2 req-ad11fcf0-c87a-49e4-9ace-21b7d9fa18c7 service nova] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Received event network-vif-deleted-ec6adf65-fbdf-4276-8e19-eb416336bbff {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1719.376404] env[62508]: DEBUG nova.compute.manager [req-c954851c-c26f-49d2-b8ce-086610e335b2 req-ad11fcf0-c87a-49e4-9ace-21b7d9fa18c7 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Received event network-changed-594da101-91d1-4ce2-a09a-bd16f9fcc148 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1719.376582] env[62508]: DEBUG nova.compute.manager 
[req-c954851c-c26f-49d2-b8ce-086610e335b2 req-ad11fcf0-c87a-49e4-9ace-21b7d9fa18c7 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Refreshing instance network info cache due to event network-changed-594da101-91d1-4ce2-a09a-bd16f9fcc148. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1719.376854] env[62508]: DEBUG oslo_concurrency.lockutils [req-c954851c-c26f-49d2-b8ce-086610e335b2 req-ad11fcf0-c87a-49e4-9ace-21b7d9fa18c7 service nova] Acquiring lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1719.377099] env[62508]: DEBUG oslo_concurrency.lockutils [req-c954851c-c26f-49d2-b8ce-086610e335b2 req-ad11fcf0-c87a-49e4-9ace-21b7d9fa18c7 service nova] Acquired lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.377358] env[62508]: DEBUG nova.network.neutron [req-c954851c-c26f-49d2-b8ce-086610e335b2 req-ad11fcf0-c87a-49e4-9ace-21b7d9fa18c7 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Refreshing network info cache for port 594da101-91d1-4ce2-a09a-bd16f9fcc148 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1719.498469] env[62508]: DEBUG nova.compute.manager [req-4d1ae0ce-74a2-4e6e-b1b2-60aa92a1b67d req-cf320c14-1e6b-410a-a520-729f572ed49d service nova] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Received event network-vif-plugged-e4f1c3ea-4ff3-4929-8984-6e3d3cc11ff8 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1719.499721] env[62508]: DEBUG oslo_concurrency.lockutils [req-4d1ae0ce-74a2-4e6e-b1b2-60aa92a1b67d req-cf320c14-1e6b-410a-a520-729f572ed49d service nova] Acquiring lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.499721] env[62508]: DEBUG oslo_concurrency.lockutils [req-4d1ae0ce-74a2-4e6e-b1b2-60aa92a1b67d req-cf320c14-1e6b-410a-a520-729f572ed49d service nova] Lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.499721] env[62508]: DEBUG oslo_concurrency.lockutils [req-4d1ae0ce-74a2-4e6e-b1b2-60aa92a1b67d req-cf320c14-1e6b-410a-a520-729f572ed49d service nova] Lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1719.499843] env[62508]: DEBUG nova.compute.manager [req-4d1ae0ce-74a2-4e6e-b1b2-60aa92a1b67d req-cf320c14-1e6b-410a-a520-729f572ed49d service nova] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] No waiting events found dispatching network-vif-plugged-e4f1c3ea-4ff3-4929-8984-6e3d3cc11ff8 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1719.500086] env[62508]: WARNING nova.compute.manager [req-4d1ae0ce-74a2-4e6e-b1b2-60aa92a1b67d req-cf320c14-1e6b-410a-a520-729f572ed49d service nova] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Received unexpected event 
network-vif-plugged-e4f1c3ea-4ff3-4929-8984-6e3d3cc11ff8 for instance with vm_state building and task_state spawning. [ 1719.500670] env[62508]: DEBUG nova.compute.manager [req-4d1ae0ce-74a2-4e6e-b1b2-60aa92a1b67d req-cf320c14-1e6b-410a-a520-729f572ed49d service nova] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Received event network-changed-e4f1c3ea-4ff3-4929-8984-6e3d3cc11ff8 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1719.501054] env[62508]: DEBUG nova.compute.manager [req-4d1ae0ce-74a2-4e6e-b1b2-60aa92a1b67d req-cf320c14-1e6b-410a-a520-729f572ed49d service nova] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Refreshing instance network info cache due to event network-changed-e4f1c3ea-4ff3-4929-8984-6e3d3cc11ff8. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1719.501054] env[62508]: DEBUG oslo_concurrency.lockutils [req-4d1ae0ce-74a2-4e6e-b1b2-60aa92a1b67d req-cf320c14-1e6b-410a-a520-729f572ed49d service nova] Acquiring lock "refresh_cache-f3e1c48d-9aaf-415f-8234-82a71bb469ee" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1719.501216] env[62508]: DEBUG oslo_concurrency.lockutils [req-4d1ae0ce-74a2-4e6e-b1b2-60aa92a1b67d req-cf320c14-1e6b-410a-a520-729f572ed49d service nova] Acquired lock "refresh_cache-f3e1c48d-9aaf-415f-8234-82a71bb469ee" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.501364] env[62508]: DEBUG nova.network.neutron [req-4d1ae0ce-74a2-4e6e-b1b2-60aa92a1b67d req-cf320c14-1e6b-410a-a520-729f572ed49d service nova] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Refreshing network info cache for port e4f1c3ea-4ff3-4929-8984-6e3d3cc11ff8 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1719.547839] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776448, 'name': CreateVM_Task, 'duration_secs': 0.500507} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.548607] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1719.549823] env[62508]: DEBUG oslo_concurrency.lockutils [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1719.549823] env[62508]: DEBUG oslo_concurrency.lockutils [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.550554] env[62508]: DEBUG oslo_concurrency.lockutils [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1719.550554] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2968355e-8725-4deb-8db6-24eac9e240c3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.557150] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1719.557150] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528af761-6e08-16f7-d291-7a7aa51f221d" [ 1719.557150] env[62508]: _type = "Task" [ 1719.557150] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.566969] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528af761-6e08-16f7-d291-7a7aa51f221d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.631351] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "b4427ba0-4dcf-4b21-a584-a7fee560f135" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.632187] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "b4427ba0-4dcf-4b21-a584-a7fee560f135" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.659719] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "954e23bc-3355-4ab6-ad81-ea7bc55b6ee7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.660022] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "954e23bc-3355-4ab6-ad81-ea7bc55b6ee7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.732081] env[62508]: DEBUG oslo_vmware.api [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776449, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.884906] env[62508]: DEBUG oslo_concurrency.lockutils [None req-326b40ad-b262-425f-9841-bc7bd2b1ae9d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "9283494f-d8e2-4077-9e4d-57ee4786c3c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.002s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.069104] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528af761-6e08-16f7-d291-7a7aa51f221d, 'name': SearchDatastore_Task, 'duration_secs': 0.014246} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.072147] env[62508]: DEBUG oslo_concurrency.lockutils [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1720.072147] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1720.072147] env[62508]: DEBUG oslo_concurrency.lockutils [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1720.072350] env[62508]: DEBUG oslo_concurrency.lockutils [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1720.072455] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1720.074674] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04c2effd-39ea-4e2e-a687-c80341c5ace9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.086157] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1720.086356] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1720.087130] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd08cb8c-2416-4ebc-8227-89c21541f4b6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.098400] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1720.098400] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c35470-50a7-a1c3-3cc2-d344dae2144b" [ 1720.098400] env[62508]: _type = "Task" [ 1720.098400] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.107118] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c35470-50a7-a1c3-3cc2-d344dae2144b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.113185] env[62508]: DEBUG nova.network.neutron [req-c954851c-c26f-49d2-b8ce-086610e335b2 req-ad11fcf0-c87a-49e4-9ace-21b7d9fa18c7 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Updated VIF entry in instance network info cache for port 594da101-91d1-4ce2-a09a-bd16f9fcc148. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1720.113664] env[62508]: DEBUG nova.network.neutron [req-c954851c-c26f-49d2-b8ce-086610e335b2 req-ad11fcf0-c87a-49e4-9ace-21b7d9fa18c7 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Updating instance_info_cache with network_info: [{"id": "be5b5d9b-1f22-455a-b4f6-128f17030129", "address": "fa:16:3e:8e:37:9c", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe5b5d9b-1f", "ovs_interfaceid": "be5b5d9b-1f22-455a-b4f6-128f17030129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "09990164-6bb3-4ed4-ba3a-f67204a82380", "address": "fa:16:3e:77:b0:c9", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09990164-6b", "ovs_interfaceid": "09990164-6bb3-4ed4-ba3a-f67204a82380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "594da101-91d1-4ce2-a09a-bd16f9fcc148", "address": "fa:16:3e:d1:cd:b5", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap594da101-91", "ovs_interfaceid": "594da101-91d1-4ce2-a09a-bd16f9fcc148", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1720.135125] env[62508]: DEBUG nova.compute.manager [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1720.162565] env[62508]: DEBUG nova.compute.manager [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1720.230161] env[62508]: DEBUG oslo_vmware.api [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776449, 'name': ReconfigVM_Task, 'duration_secs': 0.751141} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.231267] env[62508]: DEBUG nova.network.neutron [req-4d1ae0ce-74a2-4e6e-b1b2-60aa92a1b67d req-cf320c14-1e6b-410a-a520-729f572ed49d service nova] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Updated VIF entry in instance network info cache for port e4f1c3ea-4ff3-4929-8984-6e3d3cc11ff8. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1720.231971] env[62508]: DEBUG nova.network.neutron [req-4d1ae0ce-74a2-4e6e-b1b2-60aa92a1b67d req-cf320c14-1e6b-410a-a520-729f572ed49d service nova] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Updating instance_info_cache with network_info: [{"id": "e4f1c3ea-4ff3-4929-8984-6e3d3cc11ff8", "address": "fa:16:3e:d6:c4:d5", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4f1c3ea-4f", "ovs_interfaceid": "e4f1c3ea-4ff3-4929-8984-6e3d3cc11ff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1720.233449] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1720.233609] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Reconfigured VM to attach interface {{(pid=62508) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1720.362954] env[62508]: INFO nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating resource usage from migration e9f19fd5-c421-44e8-bc0f-0cbc96a8fa64 [ 1720.384247] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance de69dbf0-86f1-4b05-a9db-8b9afaabe49c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1720.384328] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1720.384529] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance a10a4217-ae46-4f00-9ba1-cdf74f44ec7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1720.384760] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1720.384898] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance a8ce13c4-ea95-4343-8eab-8a0dafbf0e03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1720.385114] env[62508]: WARNING nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance deee2c81-4d2c-47d3-aae6-ef829d59c644 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1720.385332] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 145306d7-f0e8-46c0-b2ab-1c41c208f976 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1720.385507] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance bee2cc61-b26c-4d2d-a2aa-ec79b8678e32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1720.385629] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance f465712f-f65a-4521-90ab-e9f5c5b6de5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1720.385747] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance a058273e-9c68-4d73-9149-ceb60c1c1cda actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1720.386069] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 24091abb-f71f-4528-8fc5-b97725cf079e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1720.386274] env[62508]: WARNING nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance e478855d-e9c7-4abc-8e22-a4b2eb0c7310 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1720.386397] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 806102ec-7622-4770-91c9-8c5723893dec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1720.386577] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance f3e1c48d-9aaf-415f-8234-82a71bb469ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1720.386854] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Migration e9f19fd5-c421-44e8-bc0f-0cbc96a8fa64 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1720.386996] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 95a289ac-3178-45ea-80d2-905b9af54f3c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1720.460920] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bb62dd62-2ab2-4cb7-98c7-5f55bf1881c4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "145306d7-f0e8-46c0-b2ab-1c41c208f976" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.461678] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bb62dd62-2ab2-4cb7-98c7-5f55bf1881c4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "145306d7-f0e8-46c0-b2ab-1c41c208f976" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.610012] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c35470-50a7-a1c3-3cc2-d344dae2144b, 'name': SearchDatastore_Task, 'duration_secs': 0.013901} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.610845] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b95cba20-1d55-414d-901d-aa5863f72fe9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.616845] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1720.616845] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526583d7-e36d-ac96-98ff-e386bfd248bd" [ 1720.616845] env[62508]: _type = "Task" [ 1720.616845] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.620515] env[62508]: DEBUG oslo_concurrency.lockutils [req-c954851c-c26f-49d2-b8ce-086610e335b2 req-ad11fcf0-c87a-49e4-9ace-21b7d9fa18c7 service nova] Releasing lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1720.626557] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526583d7-e36d-ac96-98ff-e386bfd248bd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.657315] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.680443] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.737445] env[62508]: DEBUG oslo_concurrency.lockutils [req-4d1ae0ce-74a2-4e6e-b1b2-60aa92a1b67d req-cf320c14-1e6b-410a-a520-729f572ed49d service nova] Releasing lock "refresh_cache-f3e1c48d-9aaf-415f-8234-82a71bb469ee" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1720.740165] env[62508]: DEBUG oslo_concurrency.lockutils [None req-72d3b4d6-29cd-41fa-b3d0-5a2c29031d49 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "interface-a058273e-9c68-4d73-9149-ceb60c1c1cda-594da101-91d1-4ce2-a09a-bd16f9fcc148" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.554s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.881503] env[62508]: DEBUG oslo_concurrency.lockutils [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "f465712f-f65a-4521-90ab-e9f5c5b6de5f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.881734] env[62508]: DEBUG oslo_concurrency.lockutils [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "f465712f-f65a-4521-90ab-e9f5c5b6de5f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.881993] env[62508]: DEBUG oslo_concurrency.lockutils [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "f465712f-f65a-4521-90ab-e9f5c5b6de5f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.882229] env[62508]: DEBUG oslo_concurrency.lockutils [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "f465712f-f65a-4521-90ab-e9f5c5b6de5f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.882408] env[62508]: DEBUG oslo_concurrency.lockutils [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "f465712f-f65a-4521-90ab-e9f5c5b6de5f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.884656] env[62508]: INFO nova.compute.manager [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Terminating instance [ 1720.886647] env[62508]: DEBUG nova.compute.manager [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1720.886853] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1720.887742] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f97b0e73-561d-4820-aff6-cea1b7bbc7ec {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.891437] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance b4427ba0-4dcf-4b21-a584-a7fee560f135 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1720.899477] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1720.899772] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8aabe70d-7a99-472e-88c2-fbb53e2f0472 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.908383] env[62508]: DEBUG oslo_vmware.api [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1720.908383] env[62508]: value = "task-1776450" [ 1720.908383] env[62508]: _type = "Task" [ 1720.908383] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.919350] env[62508]: DEBUG oslo_vmware.api [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776450, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.964566] env[62508]: INFO nova.compute.manager [None req-bb62dd62-2ab2-4cb7-98c7-5f55bf1881c4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Detaching volume 9420607e-8de8-4e29-a868-732bd5be0158 [ 1721.005067] env[62508]: INFO nova.virt.block_device [None req-bb62dd62-2ab2-4cb7-98c7-5f55bf1881c4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Attempting to driver detach volume 9420607e-8de8-4e29-a868-732bd5be0158 from mountpoint /dev/sdb [ 1721.005311] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb62dd62-2ab2-4cb7-98c7-5f55bf1881c4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Volume detach. Driver type: vmdk {{(pid=62508) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1721.005532] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb62dd62-2ab2-4cb7-98c7-5f55bf1881c4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368760', 'volume_id': '9420607e-8de8-4e29-a868-732bd5be0158', 'name': 'volume-9420607e-8de8-4e29-a868-732bd5be0158', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '145306d7-f0e8-46c0-b2ab-1c41c208f976', 'attached_at': '', 'detached_at': '', 'volume_id': '9420607e-8de8-4e29-a868-732bd5be0158', 'serial': '9420607e-8de8-4e29-a868-732bd5be0158'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1721.006435] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d64ef5c1-a45f-4709-9690-cbc2bb18f5f9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.029562] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bde0acd-4a67-4205-9e0a-87ac30b5b9d8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.038540] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4121c9-2a75-41a2-8677-667cb7469651 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.061516] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f6cca5-1c21-4bc5-9a95-9440dc2e2ac0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.080182] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb62dd62-2ab2-4cb7-98c7-5f55bf1881c4 
tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] The volume has not been displaced from its original location: [datastore1] volume-9420607e-8de8-4e29-a868-732bd5be0158/volume-9420607e-8de8-4e29-a868-732bd5be0158.vmdk. No consolidation needed. {{(pid=62508) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1721.086088] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb62dd62-2ab2-4cb7-98c7-5f55bf1881c4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Reconfiguring VM instance instance-00000044 to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1721.086505] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b22a447-f378-4ead-9cfa-209619e20704 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.110669] env[62508]: DEBUG oslo_vmware.api [None req-bb62dd62-2ab2-4cb7-98c7-5f55bf1881c4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1721.110669] env[62508]: value = "task-1776451" [ 1721.110669] env[62508]: _type = "Task" [ 1721.110669] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.122485] env[62508]: DEBUG oslo_vmware.api [None req-bb62dd62-2ab2-4cb7-98c7-5f55bf1881c4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776451, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.135508] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526583d7-e36d-ac96-98ff-e386bfd248bd, 'name': SearchDatastore_Task, 'duration_secs': 0.013583} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.135811] env[62508]: DEBUG oslo_concurrency.lockutils [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1721.136129] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] f3e1c48d-9aaf-415f-8234-82a71bb469ee/f3e1c48d-9aaf-415f-8234-82a71bb469ee.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1721.136413] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d472351e-ec5e-4401-81b3-22479a91bb6f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.145369] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1721.145369] env[62508]: value = "task-1776452" [ 1721.145369] env[62508]: _type = "Task" [ 1721.145369] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.154889] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776452, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.394877] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1721.395239] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1721.395399] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1721.419791] env[62508]: DEBUG oslo_vmware.api [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776450, 'name': PowerOffVM_Task, 'duration_secs': 0.253004} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.422622] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1721.422791] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1721.423569] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0185f07c-e5d4-4ab4-834b-4528c41cbafd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.563571] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1721.563913] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1721.564141] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Deleting the datastore file [datastore1] f465712f-f65a-4521-90ab-e9f5c5b6de5f {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1721.564456] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9d64acb-9587-4936-a174-fbf38a3c8063 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.573308] env[62508]: 
DEBUG oslo_vmware.api [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1721.573308] env[62508]: value = "task-1776454" [ 1721.573308] env[62508]: _type = "Task" [ 1721.573308] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.587153] env[62508]: DEBUG oslo_vmware.api [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776454, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.625991] env[62508]: DEBUG oslo_vmware.api [None req-bb62dd62-2ab2-4cb7-98c7-5f55bf1881c4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776451, 'name': ReconfigVM_Task, 'duration_secs': 0.303929} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.626325] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb62dd62-2ab2-4cb7-98c7-5f55bf1881c4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Reconfigured VM instance instance-00000044 to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1721.634503] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c76cf1e-7974-48a5-a319-056169e1655d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.652914] env[62508]: DEBUG oslo_vmware.api [None req-bb62dd62-2ab2-4cb7-98c7-5f55bf1881c4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1721.652914] env[62508]: value = "task-1776455" [ 1721.652914] env[62508]: _type = "Task" [ 1721.652914] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.656738] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776452, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.669654] env[62508]: DEBUG oslo_vmware.api [None req-bb62dd62-2ab2-4cb7-98c7-5f55bf1881c4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776455, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.691390] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72f3959-18ad-477a-b80b-b0b5f73b3a1e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.700810] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e91f979-879f-4737-adf3-18bbb1b5c587 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.733444] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dae4c45-163e-4ea8-8984-a089a78a0398 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.742039] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e17b39-0fbb-4260-95df-f4909781cea7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.760114] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1722.090529] env[62508]: DEBUG oslo_vmware.api [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776454, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.354673} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.091299] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1722.091546] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1722.091762] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1722.092061] env[62508]: INFO nova.compute.manager [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Took 1.21 seconds to destroy the instance on the hypervisor. 
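[editor's note] The PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task and CopyVirtualDisk_Task entries above all follow the same shape: the driver invokes a vCenter method that returns a task reference, then polls it until it reports success ("Waiting for the task ... to complete", "progress is N%", "completed successfully"). The sketch below is a minimal, self-contained illustration of that polling loop only, under stated assumptions: get_task_info and TaskFailed are hypothetical stand-ins, not oslo.vmware APIs; in Nova the equivalent work is done inside oslo.vmware's wait_for_task/_poll_task, which reads the task's info property through the vSphere PropertyCollector and runs on a looping call rather than time.sleep.

    import time


    class TaskFailed(Exception):
        """Hypothetical error raised when a vCenter task ends in the 'error' state."""


    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        """Poll a vCenter task until it finishes, mirroring the log pattern.

        get_task_info is a hypothetical callable returning an object with
        .state ('running', 'success' or 'error'), .progress and .error; it
        stands in for the property reads oslo.vmware performs internally.
        """
        print(f"Waiting for the task: {task_ref} to complete.")
        while True:
            info = get_task_info(task_ref)
            if info.state == "success":
                print(f"Task: {task_ref} completed successfully.")
                return info
            if info.state == "error":
                raise TaskFailed(info.error)
            print(f"Task: {task_ref} progress is {info.progress or 0}%.")
            time.sleep(poll_interval)  # illustrative; oslo.vmware uses a looping call
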
[ 1722.092374] env[62508]: DEBUG oslo.service.loopingcall [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1722.092576] env[62508]: DEBUG nova.compute.manager [-] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1722.092683] env[62508]: DEBUG nova.network.neutron [-] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1722.156210] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776452, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.605244} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.156513] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] f3e1c48d-9aaf-415f-8234-82a71bb469ee/f3e1c48d-9aaf-415f-8234-82a71bb469ee.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1722.156731] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1722.157040] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cd3fe09d-9365-4c67-9766-0d87169d7e00 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.168028] env[62508]: DEBUG oslo_vmware.api [None req-bb62dd62-2ab2-4cb7-98c7-5f55bf1881c4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776455, 'name': ReconfigVM_Task, 'duration_secs': 0.175897} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.169464] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb62dd62-2ab2-4cb7-98c7-5f55bf1881c4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368760', 'volume_id': '9420607e-8de8-4e29-a868-732bd5be0158', 'name': 'volume-9420607e-8de8-4e29-a868-732bd5be0158', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '145306d7-f0e8-46c0-b2ab-1c41c208f976', 'attached_at': '', 'detached_at': '', 'volume_id': '9420607e-8de8-4e29-a868-732bd5be0158', 'serial': '9420607e-8de8-4e29-a868-732bd5be0158'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1722.171803] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1722.171803] env[62508]: value = "task-1776456" [ 1722.171803] env[62508]: _type = "Task" [ 1722.171803] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.264013] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1722.410989] env[62508]: DEBUG nova.compute.manager [req-e6a1864c-2735-4457-8cdf-6b6f70138e02 req-05836da6-b7d6-4015-b630-0f2708bb7dee service nova] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Received event network-vif-deleted-934ffcfc-e193-4a25-9167-be27718f24af {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1722.411353] env[62508]: INFO nova.compute.manager [req-e6a1864c-2735-4457-8cdf-6b6f70138e02 req-05836da6-b7d6-4015-b630-0f2708bb7dee service nova] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Neutron deleted interface 934ffcfc-e193-4a25-9167-be27718f24af; detaching it from the instance and deleting it from the info cache [ 1722.411596] env[62508]: DEBUG nova.network.neutron [req-e6a1864c-2735-4457-8cdf-6b6f70138e02 req-05836da6-b7d6-4015-b630-0f2708bb7dee service nova] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.585758] env[62508]: DEBUG oslo_concurrency.lockutils [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "interface-a058273e-9c68-4d73-9149-ceb60c1c1cda-09990164-6bb3-4ed4-ba3a-f67204a82380" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1722.586077] env[62508]: DEBUG oslo_concurrency.lockutils [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "interface-a058273e-9c68-4d73-9149-ceb60c1c1cda-09990164-6bb3-4ed4-ba3a-f67204a82380" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.682545] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776456, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089392} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.684029] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1722.684029] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d200d7d-84d5-4361-ad9e-01f2bc323b81 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.708998] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] f3e1c48d-9aaf-415f-8234-82a71bb469ee/f3e1c48d-9aaf-415f-8234-82a71bb469ee.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1722.709355] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c9142bd-9ba8-4f59-8828-e4f4a3215e5d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.725408] env[62508]: DEBUG nova.objects.instance [None req-bb62dd62-2ab2-4cb7-98c7-5f55bf1881c4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lazy-loading 'flavor' on Instance uuid 145306d7-f0e8-46c0-b2ab-1c41c208f976 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1722.732734] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1722.732734] env[62508]: value = "task-1776457" [ 1722.732734] env[62508]: _type = "Task" [ 1722.732734] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.741369] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776457, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.771032] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1722.771032] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.423s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.771274] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 5.574s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.772950] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1722.773135] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Cleaning up deleted instances {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1722.889950] env[62508]: DEBUG nova.network.neutron [-] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.914690] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-85ec10a6-ca58-4638-a651-4b1f9b1fa34b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.924472] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a43953-cbec-4631-8df9-b2dad62aaee1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.958414] env[62508]: DEBUG nova.compute.manager [req-e6a1864c-2735-4457-8cdf-6b6f70138e02 req-05836da6-b7d6-4015-b630-0f2708bb7dee service nova] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Detach interface failed, port_id=934ffcfc-e193-4a25-9167-be27718f24af, reason: Instance f465712f-f65a-4521-90ab-e9f5c5b6de5f could not be found. 
{{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1723.090028] env[62508]: DEBUG oslo_concurrency.lockutils [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1723.090160] env[62508]: DEBUG oslo_concurrency.lockutils [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1723.090962] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b5930a-ceed-4172-abe0-49127a1cb4d8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.108522] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be6d2269-c7a3-4d8a-aa88-c5a433db0c88 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.137753] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Reconfiguring VM to detach interface {{(pid=62508) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1723.138070] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-679ce3c6-0c92-4146-8bfa-17daf4db2b71 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.157517] env[62508]: DEBUG oslo_vmware.api [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1723.157517] env[62508]: value = "task-1776458" [ 1723.157517] env[62508]: _type = "Task" [ 1723.157517] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.165265] env[62508]: DEBUG oslo_vmware.api [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776458, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.242336] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776457, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.277818] env[62508]: INFO nova.compute.claims [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1723.298513] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] There are 62 instances to clean {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1723.298786] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 2a564423-a2fd-4873-885f-37777bdd83eb] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1723.392632] env[62508]: INFO nova.compute.manager [-] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Took 1.30 seconds to deallocate network for instance. [ 1723.668681] env[62508]: DEBUG oslo_vmware.api [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776458, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.734058] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bb62dd62-2ab2-4cb7-98c7-5f55bf1881c4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "145306d7-f0e8-46c0-b2ab-1c41c208f976" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.273s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.745402] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776457, 'name': ReconfigVM_Task, 'duration_secs': 0.661923} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.745721] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Reconfigured VM instance instance-00000058 to attach disk [datastore1] f3e1c48d-9aaf-415f-8234-82a71bb469ee/f3e1c48d-9aaf-415f-8234-82a71bb469ee.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1723.746482] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ade372f6-15ee-4046-8d62-6143d9d44e8e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.753140] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1723.753140] env[62508]: value = "task-1776459" [ 1723.753140] env[62508]: _type = "Task" [ 1723.753140] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.761735] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776459, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.802350] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 9bafe786-5815-4871-8405-558cac7b3654] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1723.805725] env[62508]: INFO nova.compute.resource_tracker [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating resource usage from migration e9f19fd5-c421-44e8-bc0f-0cbc96a8fa64 [ 1723.900022] env[62508]: DEBUG oslo_concurrency.lockutils [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.065419] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77974f9f-19c1-47df-91bc-b49d58f83b24 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.073933] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c2d71d-ae78-4394-9259-66949799d62b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.110139] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537f8531-342b-4ed4-9bf2-1e4256cabe19 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.118627] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d66e57a-a4a2-4921-8c06-d61548670aa8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.132837] env[62508]: DEBUG nova.compute.provider_tree [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1724.168882] env[62508]: DEBUG oslo_vmware.api [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776458, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.263810] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776459, 'name': Rename_Task, 'duration_secs': 0.217602} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.264158] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1724.264424] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-93a71ebc-1d9b-43fb-9e76-0280d70a28f4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.273165] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1724.273165] env[62508]: value = "task-1776460" [ 1724.273165] env[62508]: _type = "Task" [ 1724.273165] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.282581] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776460, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.310387] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 73841c12-1ae9-46a5-bfe0-e0f82877667c] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1724.636591] env[62508]: DEBUG nova.scheduler.client.report [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1724.669853] env[62508]: DEBUG oslo_vmware.api [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776458, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.733408] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "145306d7-f0e8-46c0-b2ab-1c41c208f976" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.733733] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "145306d7-f0e8-46c0-b2ab-1c41c208f976" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.733971] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "145306d7-f0e8-46c0-b2ab-1c41c208f976-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.734206] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "145306d7-f0e8-46c0-b2ab-1c41c208f976-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.734382] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "145306d7-f0e8-46c0-b2ab-1c41c208f976-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.736635] env[62508]: INFO nova.compute.manager [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Terminating instance [ 1724.738397] env[62508]: DEBUG nova.compute.manager [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1724.738632] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1724.739470] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f7eaa6-2e49-4e04-b24e-2711d0170147 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.747378] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1724.747601] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44dc21ce-a9aa-420f-8bd4-2bd869b8d1bf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.754148] env[62508]: DEBUG oslo_vmware.api [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1724.754148] env[62508]: value = "task-1776461" [ 1724.754148] env[62508]: _type = "Task" [ 1724.754148] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.763894] env[62508]: DEBUG oslo_vmware.api [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776461, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.782555] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776460, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.813647] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 5b3dd9d0-7f30-45c2-931a-ce7175820710] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1725.142529] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.371s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.142862] env[62508]: INFO nova.compute.manager [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Migrating [ 1725.149335] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.600s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1725.149527] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.151526] env[62508]: DEBUG oslo_concurrency.lockutils [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.597s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1725.151713] env[62508]: DEBUG oslo_concurrency.lockutils [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.153372] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.496s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1725.155108] env[62508]: INFO nova.compute.claims [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1725.174651] 
env[62508]: DEBUG oslo_vmware.api [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776458, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.180475] env[62508]: INFO nova.scheduler.client.report [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Deleted allocations for instance deee2c81-4d2c-47d3-aae6-ef829d59c644 [ 1725.182135] env[62508]: INFO nova.scheduler.client.report [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Deleted allocations for instance e478855d-e9c7-4abc-8e22-a4b2eb0c7310 [ 1725.264834] env[62508]: DEBUG oslo_vmware.api [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776461, 'name': PowerOffVM_Task, 'duration_secs': 0.343131} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.264834] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1725.265061] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1725.265272] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4473276c-bf76-4ed9-ac6d-44c274d5ad9e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.283894] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776460, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.316973] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: c45b2c35-e58a-4ffa-861a-980747e552a1] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1725.366011] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1725.366385] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1725.366638] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Deleting the datastore file [datastore1] 145306d7-f0e8-46c0-b2ab-1c41c208f976 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1725.367253] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1dc76705-9cbc-49ee-b9cb-3f0e88ddf02f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.374593] env[62508]: DEBUG oslo_vmware.api [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1725.374593] env[62508]: value = "task-1776463" [ 1725.374593] env[62508]: _type = "Task" [ 1725.374593] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.382493] env[62508]: DEBUG oslo_vmware.api [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776463, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.666051] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1725.666051] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1725.666051] env[62508]: DEBUG nova.network.neutron [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1725.679277] env[62508]: DEBUG oslo_vmware.api [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776458, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.694957] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ff8e2f4f-dcc1-427e-a635-25c22e339ec4 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "e478855d-e9c7-4abc-8e22-a4b2eb0c7310" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.844s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.696137] env[62508]: DEBUG oslo_concurrency.lockutils [None req-33ca383c-3e03-453f-9421-4f68e616adce tempest-VolumesAdminNegativeTest-730944924 tempest-VolumesAdminNegativeTest-730944924-project-member] Lock "deee2c81-4d2c-47d3-aae6-ef829d59c644" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.463s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.784870] env[62508]: DEBUG oslo_vmware.api [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776460, 'name': PowerOnVM_Task, 'duration_secs': 1.068608} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.785206] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1725.785429] env[62508]: INFO nova.compute.manager [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Took 11.84 seconds to spawn the instance on the hypervisor. [ 1725.785608] env[62508]: DEBUG nova.compute.manager [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1725.786444] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e991f27-4e1e-4060-9e7c-b37d4b930f37 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.821241] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: e875f30e-2c25-46a4-8c74-36f08e7eb982] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1725.885225] env[62508]: DEBUG oslo_vmware.api [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776463, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.184643] env[62508]: DEBUG oslo_vmware.api [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776458, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.309356] env[62508]: INFO nova.compute.manager [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Took 28.12 seconds to build instance. [ 1726.325550] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 9283494f-d8e2-4077-9e4d-57ee4786c3c7] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1726.388453] env[62508]: DEBUG oslo_vmware.api [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776463, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.55871} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.388781] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1726.389008] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1726.389384] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1726.389567] env[62508]: INFO nova.compute.manager [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1726.389813] env[62508]: DEBUG oslo.service.loopingcall [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1726.390030] env[62508]: DEBUG nova.compute.manager [-] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1726.390133] env[62508]: DEBUG nova.network.neutron [-] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1726.441324] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5ef769-124e-4f22-a0d8-88d889e5526f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.445499] env[62508]: DEBUG nova.network.neutron [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating instance_info_cache with network_info: [{"id": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "address": "fa:16:3e:f8:bf:1b", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9e88907-91", "ovs_interfaceid": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1726.454885] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2244342-3147-4333-b6b1-97073ee086d5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.491570] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e54837-171e-431c-91f8-ce6a368d3115 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.499457] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9680b403-c9ab-4a32-8ecf-2a58295a64af {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.514557] env[62508]: DEBUG nova.compute.provider_tree [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Inventory has not changed in ProviderTree for provider: 
5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1726.684488] env[62508]: DEBUG oslo_vmware.api [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776458, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.693936] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.694189] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.812875] env[62508]: DEBUG oslo_concurrency.lockutils [None req-47d21405-4064-4061-ba92-7dc2b7092508 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.636s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.829531] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 091a11ef-d6c7-4f04-90a6-273da14ce88b] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1726.950782] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1727.018461] env[62508]: DEBUG nova.scheduler.client.report [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1727.189025] env[62508]: DEBUG oslo_vmware.api [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': 
task-1776458, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.196258] env[62508]: DEBUG nova.compute.manager [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1727.332743] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: a617fe8b-c70e-4988-a6ce-437ccc5261c6] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1727.421130] env[62508]: DEBUG nova.compute.manager [req-e9621027-c932-4540-9e76-fbaa1956dee5 req-3d7cd466-f9bb-4471-b143-f15d957e2113 service nova] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Received event network-vif-deleted-ec2a5bd1-b682-40fe-825d-7029eb22f70e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1727.421374] env[62508]: INFO nova.compute.manager [req-e9621027-c932-4540-9e76-fbaa1956dee5 req-3d7cd466-f9bb-4471-b143-f15d957e2113 service nova] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Neutron deleted interface ec2a5bd1-b682-40fe-825d-7029eb22f70e; detaching it from the instance and deleting it from the info cache [ 1727.421548] env[62508]: DEBUG nova.network.neutron [req-e9621027-c932-4540-9e76-fbaa1956dee5 req-3d7cd466-f9bb-4471-b143-f15d957e2113 service nova] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1727.523218] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.370s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.523760] env[62508]: DEBUG nova.compute.manager [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1727.526437] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.846s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1727.528223] env[62508]: INFO nova.compute.claims [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1727.691827] env[62508]: DEBUG oslo_vmware.api [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776458, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.733438] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1727.752938] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1727.753916] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1727.836636] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 2e32ca83-8506-4588-bd33-4eadb7d2d30a] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1727.892917] env[62508]: DEBUG nova.network.neutron [-] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1727.925021] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ddb2b5e9-42cc-4d93-9758-72cbddc031dd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.937276] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d42830d-cf1c-40af-9a6f-74ceb9a3b5cc {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.971516] env[62508]: DEBUG nova.compute.manager [req-e9621027-c932-4540-9e76-fbaa1956dee5 req-3d7cd466-f9bb-4471-b143-f15d957e2113 service nova] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Detach interface failed, port_id=ec2a5bd1-b682-40fe-825d-7029eb22f70e, reason: Instance 145306d7-f0e8-46c0-b2ab-1c41c208f976 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1728.039839] env[62508]: DEBUG nova.compute.utils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1728.040071] env[62508]: DEBUG nova.compute.manager [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1728.040249] env[62508]: DEBUG nova.network.neutron [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1728.150370] env[62508]: DEBUG oslo_vmware.rw_handles [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d40770-3178-1bcb-a263-eb1f5060c977/disk-0.vmdk. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1728.151102] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ce0428-75b8-4ce4-a51d-9f89615cfb40 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.155424] env[62508]: DEBUG nova.policy [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f5d94bc165341d1ab686cadece5f7f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ecc6c09064734ca381a22d894304cd80', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1728.159120] env[62508]: DEBUG oslo_vmware.rw_handles [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d40770-3178-1bcb-a263-eb1f5060c977/disk-0.vmdk is in state: ready. 
{{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1728.159312] env[62508]: ERROR oslo_vmware.rw_handles [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d40770-3178-1bcb-a263-eb1f5060c977/disk-0.vmdk due to incomplete transfer. [ 1728.159540] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-640c0d45-6576-4198-9aa8-baa6405484b2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.169184] env[62508]: DEBUG oslo_vmware.rw_handles [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d40770-3178-1bcb-a263-eb1f5060c977/disk-0.vmdk. {{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1728.169184] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Uploaded image cf41ffb1-ea55-4c93-bf70-33b17e44f550 to the Glance image server {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1728.169297] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Destroying the VM {{(pid=62508) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1728.169543] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cdf09aea-e597-435c-8c00-edc1ae1b69ed {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.177701] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1728.177701] env[62508]: value = "task-1776464" [ 1728.177701] env[62508]: _type = "Task" [ 1728.177701] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.188448] env[62508]: DEBUG oslo_vmware.api [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776458, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.194153] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776464, 'name': Destroy_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.257420] env[62508]: DEBUG nova.compute.utils [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1728.340466] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 4cc6d0f4-413a-44e1-850f-da499f582d15] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1728.397975] env[62508]: INFO nova.compute.manager [-] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Took 2.01 seconds to deallocate network for instance. [ 1728.478864] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1abf3e62-d01f-4e32-bc79-f0692eec47cf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.498131] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating instance '95a289ac-3178-45ea-80d2-905b9af54f3c' progress to 0 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1728.544720] env[62508]: DEBUG nova.compute.manager [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1728.628646] env[62508]: DEBUG nova.network.neutron [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Successfully created port: 4f1783a7-f095-41fa-9b4f-cf3505149a6e {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1728.685097] env[62508]: DEBUG oslo_vmware.api [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776458, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.692846] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776464, 'name': Destroy_Task} progress is 33%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.762127] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.833896] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d8ce349-e26b-4a85-ba38-63a2c0857042 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.842955] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856d1394-71c3-46fd-8404-c9596d0279bf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.847102] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: a72fd09e-a3be-486a-a03b-8c25b04d82d0] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1728.884022] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c3333a-0532-4ee5-b1e5-913f064cd0dd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.891285] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b40fc0-cc44-4875-ae44-25338f436600 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.908788] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.909146] env[62508]: DEBUG nova.compute.provider_tree [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1729.005533] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1729.008186] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e03a147c-9042-4817-adc8-318cf64ae99f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.014138] env[62508]: DEBUG oslo_vmware.api [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] 
Waiting for the task: (returnval){ [ 1729.014138] env[62508]: value = "task-1776465" [ 1729.014138] env[62508]: _type = "Task" [ 1729.014138] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.023652] env[62508]: DEBUG oslo_vmware.api [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776465, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.188612] env[62508]: DEBUG oslo_vmware.api [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776458, 'name': ReconfigVM_Task, 'duration_secs': 5.798042} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.191758] env[62508]: DEBUG oslo_concurrency.lockutils [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1729.191971] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Reconfigured VM to detach interface {{(pid=62508) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1729.193989] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776464, 'name': Destroy_Task, 'duration_secs': 0.966267} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.194241] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Destroyed the VM [ 1729.194498] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Deleting Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1729.194991] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4aed0e3a-373d-4739-8706-164ea7e33a04 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.201325] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1729.201325] env[62508]: value = "task-1776466" [ 1729.201325] env[62508]: _type = "Task" [ 1729.201325] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.209164] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776466, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.354294] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 14c911d6-44c2-4c56-a027-3d25a1e58bcc] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1729.412171] env[62508]: DEBUG nova.scheduler.client.report [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1729.525267] env[62508]: DEBUG oslo_vmware.api [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776465, 'name': PowerOffVM_Task, 'duration_secs': 0.242195} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.525577] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1729.527355] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating instance '95a289ac-3178-45ea-80d2-905b9af54f3c' progress to 17 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1729.561216] env[62508]: DEBUG nova.compute.manager [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1729.591705] env[62508]: DEBUG nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1729.591906] env[62508]: DEBUG nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1729.591906] env[62508]: DEBUG nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1729.592086] env[62508]: DEBUG nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1729.592240] env[62508]: DEBUG nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1729.592391] env[62508]: DEBUG nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1729.592596] env[62508]: DEBUG nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1729.592755] env[62508]: DEBUG nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1729.592922] env[62508]: DEBUG 
nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1729.593335] env[62508]: DEBUG nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1729.593580] env[62508]: DEBUG nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1729.594722] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8314c228-6b0d-4088-a04d-bb23a0166862 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.607667] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b032aa-4b7a-421d-ab6d-b31bccfa0185 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.683152] env[62508]: DEBUG nova.compute.manager [req-85913dc4-7634-45c1-9a12-15879526ac35 req-38464996-1079-49a5-b948-d05edda79d73 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Received event network-vif-deleted-09990164-6bb3-4ed4-ba3a-f67204a82380 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1729.683152] env[62508]: INFO nova.compute.manager [req-85913dc4-7634-45c1-9a12-15879526ac35 req-38464996-1079-49a5-b948-d05edda79d73 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Neutron deleted interface 09990164-6bb3-4ed4-ba3a-f67204a82380; detaching it from the instance and deleting it from the info cache [ 1729.683524] env[62508]: DEBUG nova.network.neutron [req-85913dc4-7634-45c1-9a12-15879526ac35 req-38464996-1079-49a5-b948-d05edda79d73 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Updating instance_info_cache with network_info: [{"id": "be5b5d9b-1f22-455a-b4f6-128f17030129", "address": "fa:16:3e:8e:37:9c", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe5b5d9b-1f", "ovs_interfaceid": 
"be5b5d9b-1f22-455a-b4f6-128f17030129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "594da101-91d1-4ce2-a09a-bd16f9fcc148", "address": "fa:16:3e:d1:cd:b5", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap594da101-91", "ovs_interfaceid": "594da101-91d1-4ce2-a09a-bd16f9fcc148", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1729.712072] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776466, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.836056] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.836371] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.836591] env[62508]: INFO nova.compute.manager [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Attaching volume 7d9b661a-9d4e-4f18-b54b-a757d6cdd88c to /dev/sdb [ 1729.862953] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: a9b92a6d-154c-42bb-842c-bc42a07299a0] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1729.880164] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a4f217-5521-4098-8d9f-03819fefba1c {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.887021] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49c260d-692b-4995-ab59-3e3381982d06 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.901853] env[62508]: DEBUG nova.virt.block_device [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Updating existing volume attachment record: 4fa739e2-c392-4a3e-8952-79d6d9ddfd6f {{(pid=62508) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1729.917997] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.391s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.917997] env[62508]: DEBUG nova.compute.manager [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1729.921138] env[62508]: DEBUG oslo_concurrency.lockutils [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.020s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.921138] env[62508]: DEBUG nova.objects.instance [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lazy-loading 'resources' on Instance uuid f465712f-f65a-4521-90ab-e9f5c5b6de5f {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1730.015266] env[62508]: DEBUG nova.compute.manager [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Received event network-vif-deleted-594da101-91d1-4ce2-a09a-bd16f9fcc148 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1730.015466] env[62508]: INFO nova.compute.manager [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Neutron deleted interface 594da101-91d1-4ce2-a09a-bd16f9fcc148; detaching it from the instance and deleting it from the info cache [ 1730.015797] env[62508]: DEBUG nova.network.neutron [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Updating instance_info_cache with network_info: [{"id": "be5b5d9b-1f22-455a-b4f6-128f17030129", "address": "fa:16:3e:8e:37:9c", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], 
"gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe5b5d9b-1f", "ovs_interfaceid": "be5b5d9b-1f22-455a-b4f6-128f17030129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1730.036369] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1730.036369] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1730.036369] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1730.036369] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1730.036369] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1730.036369] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1730.038427] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1730.038914] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1730.038914] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1730.039200] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1730.039445] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1730.048593] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a86b236-47ef-4d2e-8ba1-3a765d9d6e64 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.068063] env[62508]: DEBUG oslo_vmware.api [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1730.068063] env[62508]: value = "task-1776468" [ 1730.068063] env[62508]: _type = "Task" [ 1730.068063] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.082618] env[62508]: DEBUG oslo_vmware.api [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776468, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.191026] env[62508]: DEBUG oslo_concurrency.lockutils [req-85913dc4-7634-45c1-9a12-15879526ac35 req-38464996-1079-49a5-b948-d05edda79d73 service nova] Acquiring lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1730.191026] env[62508]: DEBUG oslo_concurrency.lockutils [req-85913dc4-7634-45c1-9a12-15879526ac35 req-38464996-1079-49a5-b948-d05edda79d73 service nova] Acquired lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1730.191026] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7198488e-6a4c-49a8-8002-2b3461ccb590 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.213712] env[62508]: DEBUG oslo_concurrency.lockutils [req-85913dc4-7634-45c1-9a12-15879526ac35 req-38464996-1079-49a5-b948-d05edda79d73 service nova] Releasing lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1730.216017] env[62508]: WARNING nova.compute.manager [req-85913dc4-7634-45c1-9a12-15879526ac35 req-38464996-1079-49a5-b948-d05edda79d73 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Detach interface failed, port_id=09990164-6bb3-4ed4-ba3a-f67204a82380, reason: No device with interface-id 09990164-6bb3-4ed4-ba3a-f67204a82380 exists on VM: nova.exception.NotFound: No device with interface-id 09990164-6bb3-4ed4-ba3a-f67204a82380 exists on VM [ 1730.226435] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776466, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.252780] env[62508]: DEBUG nova.network.neutron [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Successfully updated port: 4f1783a7-f095-41fa-9b4f-cf3505149a6e {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1730.365675] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 29223197-9a79-45cc-baa6-3deb731ec08e] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1730.423775] env[62508]: DEBUG nova.compute.utils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1730.431143] env[62508]: DEBUG nova.compute.manager [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1730.431143] env[62508]: DEBUG nova.network.neutron [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1730.519191] env[62508]: DEBUG oslo_concurrency.lockutils [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] Acquiring lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1730.519377] env[62508]: DEBUG oslo_concurrency.lockutils [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] Acquired lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1730.520798] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263f00e9-be2e-42a2-8b52-c7f7f20e0564 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.548463] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67837589-f0f1-43a2-b17a-08120a87e892 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.559526] env[62508]: DEBUG nova.policy [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f5d94bc165341d1ab686cadece5f7f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ecc6c09064734ca381a22d894304cd80', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1730.578045] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1730.584045] env[62508]: DEBUG nova.virt.vmwareapi.vmops [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Reconfiguring VM to detach interface {{(pid=62508) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1730.590456] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e407e2b-91b0-49f0-806f-06745a9c0be1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.611033] env[62508]: DEBUG oslo_vmware.api [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 
tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776468, 'name': ReconfigVM_Task, 'duration_secs': 0.190935} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.611676] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating instance '95a289ac-3178-45ea-80d2-905b9af54f3c' progress to 33 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1730.614996] env[62508]: DEBUG oslo_vmware.api [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] Waiting for the task: (returnval){ [ 1730.614996] env[62508]: value = "task-1776471" [ 1730.614996] env[62508]: _type = "Task" [ 1730.614996] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.624191] env[62508]: DEBUG oslo_vmware.api [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] Task: {'id': task-1776471, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.655675] env[62508]: DEBUG oslo_concurrency.lockutils [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1730.655894] env[62508]: DEBUG oslo_concurrency.lockutils [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1730.656098] env[62508]: DEBUG nova.network.neutron [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1730.726880] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776466, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.755804] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "refresh_cache-b4427ba0-4dcf-4b21-a584-a7fee560f135" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1730.755988] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquired lock "refresh_cache-b4427ba0-4dcf-4b21-a584-a7fee560f135" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1730.756289] env[62508]: DEBUG nova.network.neutron [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1730.795079] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced99a58-281b-4959-a93f-7ca9a1d041a0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.806776] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb8960ca-eca5-49b5-8e6c-054e826804e4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.843124] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a798281-29c6-4c00-a24e-1ef2a6176c6f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.851903] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b18315-1dff-4cfc-8613-3fb41beac7f8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.868572] env[62508]: DEBUG nova.compute.provider_tree [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1730.874291] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 4bf92157-1d8c-4c3c-bc61-adb6d26bff54] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1730.926713] env[62508]: DEBUG nova.compute.manager [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1730.951041] env[62508]: DEBUG nova.network.neutron [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Successfully created port: 153213b8-b5cb-4074-8748-81bb5e028c02 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1731.119299] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1731.120233] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1731.120233] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1731.120233] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1731.120428] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1731.120581] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1731.120880] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1731.121120] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 
tempest-ServerActionsTestJSON-1490369560-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1731.121386] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1731.121613] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1731.121871] env[62508]: DEBUG nova.virt.hardware [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1731.130501] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Reconfiguring VM instance instance-00000020 to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1731.131678] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55a484bd-6e9f-4bea-bc62-2f8124c3b0a7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.157526] env[62508]: DEBUG oslo_vmware.api [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] Task: {'id': task-1776471, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.159233] env[62508]: DEBUG oslo_vmware.api [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1731.159233] env[62508]: value = "task-1776472" [ 1731.159233] env[62508]: _type = "Task" [ 1731.159233] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.171508] env[62508]: DEBUG oslo_vmware.api [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776472, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.226407] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776466, 'name': RemoveSnapshot_Task, 'duration_secs': 1.7035} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.226732] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Deleted Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1731.227011] env[62508]: DEBUG nova.compute.manager [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1731.228011] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05f77c2-5afc-4d1c-b01a-221c1ed69fbd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.296813] env[62508]: DEBUG nova.network.neutron [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1731.373923] env[62508]: DEBUG nova.scheduler.client.report [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1731.381664] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 4d24bacc-48c4-4649-bb29-fcae2cf77782] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1732.220226] env[62508]: DEBUG oslo_concurrency.lockutils [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.300s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1732.222183] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 68d64a06-f752-459c-a152-157893e79bfd] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1732.224589] env[62508]: DEBUG nova.compute.manager [req-ac616fe8-9a7a-4f2d-a422-9746aa48626b req-d6c1bc74-8dfd-48e7-bd86-2ff97ce74758 service nova] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Received event network-vif-plugged-4f1783a7-f095-41fa-9b4f-cf3505149a6e {{(pid=62508) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11132}} [ 1732.224781] env[62508]: DEBUG oslo_concurrency.lockutils [req-ac616fe8-9a7a-4f2d-a422-9746aa48626b req-d6c1bc74-8dfd-48e7-bd86-2ff97ce74758 service nova] Acquiring lock "b4427ba0-4dcf-4b21-a584-a7fee560f135-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1732.224975] env[62508]: DEBUG oslo_concurrency.lockutils [req-ac616fe8-9a7a-4f2d-a422-9746aa48626b req-d6c1bc74-8dfd-48e7-bd86-2ff97ce74758 service nova] Lock "b4427ba0-4dcf-4b21-a584-a7fee560f135-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1732.225153] env[62508]: DEBUG oslo_concurrency.lockutils [req-ac616fe8-9a7a-4f2d-a422-9746aa48626b req-d6c1bc74-8dfd-48e7-bd86-2ff97ce74758 service nova] Lock "b4427ba0-4dcf-4b21-a584-a7fee560f135-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1732.225858] env[62508]: DEBUG nova.compute.manager [req-ac616fe8-9a7a-4f2d-a422-9746aa48626b req-d6c1bc74-8dfd-48e7-bd86-2ff97ce74758 service nova] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] No waiting events found dispatching network-vif-plugged-4f1783a7-f095-41fa-9b4f-cf3505149a6e {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1732.225858] env[62508]: WARNING nova.compute.manager [req-ac616fe8-9a7a-4f2d-a422-9746aa48626b req-d6c1bc74-8dfd-48e7-bd86-2ff97ce74758 service nova] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Received unexpected event network-vif-plugged-4f1783a7-f095-41fa-9b4f-cf3505149a6e for instance with vm_state building and task_state spawning. [ 1732.225858] env[62508]: DEBUG nova.compute.manager [req-ac616fe8-9a7a-4f2d-a422-9746aa48626b req-d6c1bc74-8dfd-48e7-bd86-2ff97ce74758 service nova] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Received event network-changed-4f1783a7-f095-41fa-9b4f-cf3505149a6e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1732.225858] env[62508]: DEBUG nova.compute.manager [req-ac616fe8-9a7a-4f2d-a422-9746aa48626b req-d6c1bc74-8dfd-48e7-bd86-2ff97ce74758 service nova] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Refreshing instance network info cache due to event network-changed-4f1783a7-f095-41fa-9b4f-cf3505149a6e. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1732.226067] env[62508]: DEBUG oslo_concurrency.lockutils [req-ac616fe8-9a7a-4f2d-a422-9746aa48626b req-d6c1bc74-8dfd-48e7-bd86-2ff97ce74758 service nova] Acquiring lock "refresh_cache-b4427ba0-4dcf-4b21-a584-a7fee560f135" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1732.226671] env[62508]: INFO nova.compute.manager [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Shelve offloading [ 1732.237130] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.503s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1732.237635] env[62508]: INFO nova.compute.claims [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1732.241587] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1732.242147] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a87f7c11-f06c-4958-a776-b6ec559cb272 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.249924] env[62508]: DEBUG oslo_vmware.api [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776472, 'name': ReconfigVM_Task, 'duration_secs': 0.267465} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.254672] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Reconfigured VM instance instance-00000020 to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1732.255518] env[62508]: DEBUG oslo_vmware.api [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] Task: {'id': task-1776471, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.255805] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1732.255805] env[62508]: value = "task-1776474" [ 1732.255805] env[62508]: _type = "Task" [ 1732.255805] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.257226] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2bbdef4-c189-46f0-96b9-b57e48dc6bd8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.261044] env[62508]: INFO nova.scheduler.client.report [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Deleted allocations for instance f465712f-f65a-4521-90ab-e9f5c5b6de5f [ 1732.293651] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 95a289ac-3178-45ea-80d2-905b9af54f3c/95a289ac-3178-45ea-80d2-905b9af54f3c.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1732.294619] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] VM already powered off {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1732.294619] env[62508]: DEBUG nova.compute.manager [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1732.294767] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4b12b77-7f92-48a8-8c77-da23d3c42968 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.309224] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe8e4161-376e-41db-b6d4-f89f302c436c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.316263] env[62508]: DEBUG oslo_concurrency.lockutils [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1732.317034] env[62508]: DEBUG oslo_concurrency.lockutils [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquired lock 
"refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1732.317034] env[62508]: DEBUG nova.network.neutron [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1732.323025] env[62508]: DEBUG oslo_vmware.api [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1732.323025] env[62508]: value = "task-1776475" [ 1732.323025] env[62508]: _type = "Task" [ 1732.323025] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.327820] env[62508]: DEBUG oslo_vmware.api [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776475, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.328827] env[62508]: DEBUG nova.network.neutron [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Updating instance_info_cache with network_info: [{"id": "4f1783a7-f095-41fa-9b4f-cf3505149a6e", "address": "fa:16:3e:e1:21:09", "network": {"id": "c194e816-3ebb-4cf6-852c-09521019b177", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1154243421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecc6c09064734ca381a22d894304cd80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f1783a7-f0", "ovs_interfaceid": "4f1783a7-f095-41fa-9b4f-cf3505149a6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1732.434346] env[62508]: DEBUG nova.network.neutron [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Updating instance_info_cache with network_info: [{"id": "be5b5d9b-1f22-455a-b4f6-128f17030129", "address": "fa:16:3e:8e:37:9c", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe5b5d9b-1f", "ovs_interfaceid": "be5b5d9b-1f22-455a-b4f6-128f17030129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1732.577460] env[62508]: DEBUG nova.network.neutron [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Successfully updated port: 153213b8-b5cb-4074-8748-81bb5e028c02 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1732.726864] env[62508]: DEBUG oslo_vmware.api [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] Task: {'id': task-1776471, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.729708] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 80a9e17e-4095-498c-80c8-200bfb4f3d1f] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1732.732466] env[62508]: DEBUG nova.compute.manager [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1732.756204] env[62508]: DEBUG nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1732.756455] env[62508]: DEBUG nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1732.756625] env[62508]: DEBUG nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1732.756822] env[62508]: DEBUG nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1732.756987] env[62508]: DEBUG nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1732.757156] env[62508]: DEBUG nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1732.757361] env[62508]: DEBUG nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1732.757527] env[62508]: DEBUG nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1732.757683] env[62508]: DEBUG 
nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1732.757841] env[62508]: DEBUG nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1732.758018] env[62508]: DEBUG nova.virt.hardware [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1732.759187] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25ba37d2-5b90-4887-ae94-fdccb8e17d51 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.768188] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b6ac3b1-17e9-4389-a4dc-a8be7a94ca09 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.775126] env[62508]: DEBUG oslo_concurrency.lockutils [None req-68b5a5b5-d56d-4ede-9c7f-be80482ec558 tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "f465712f-f65a-4521-90ab-e9f5c5b6de5f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.893s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1732.832276] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Releasing lock "refresh_cache-b4427ba0-4dcf-4b21-a584-a7fee560f135" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1732.832596] env[62508]: DEBUG nova.compute.manager [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Instance network_info: |[{"id": "4f1783a7-f095-41fa-9b4f-cf3505149a6e", "address": "fa:16:3e:e1:21:09", "network": {"id": "c194e816-3ebb-4cf6-852c-09521019b177", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1154243421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecc6c09064734ca381a22d894304cd80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f1783a7-f0", "ovs_interfaceid": "4f1783a7-f095-41fa-9b4f-cf3505149a6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1732.832894] env[62508]: DEBUG oslo_vmware.api [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776475, 'name': ReconfigVM_Task, 'duration_secs': 0.395044} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.833140] env[62508]: DEBUG oslo_concurrency.lockutils [req-ac616fe8-9a7a-4f2d-a422-9746aa48626b req-d6c1bc74-8dfd-48e7-bd86-2ff97ce74758 service nova] Acquired lock "refresh_cache-b4427ba0-4dcf-4b21-a584-a7fee560f135" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1732.833308] env[62508]: DEBUG nova.network.neutron [req-ac616fe8-9a7a-4f2d-a422-9746aa48626b req-d6c1bc74-8dfd-48e7-bd86-2ff97ce74758 service nova] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Refreshing network info cache for port 4f1783a7-f095-41fa-9b4f-cf3505149a6e {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1732.834570] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:21:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'be8bd197-4b2b-46e7-88ea-2554b0438584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4f1783a7-f095-41fa-9b4f-cf3505149a6e', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1732.843915] env[62508]: DEBUG oslo.service.loopingcall [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1732.844177] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 95a289ac-3178-45ea-80d2-905b9af54f3c/95a289ac-3178-45ea-80d2-905b9af54f3c.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1732.846789] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating instance '95a289ac-3178-45ea-80d2-905b9af54f3c' progress to 50 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1732.852836] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1732.853605] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-150ab180-d045-440e-8553-3409a8f24e07 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.879945] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1732.879945] env[62508]: value = "task-1776476" [ 1732.879945] env[62508]: _type = "Task" [ 1732.879945] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.889261] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776476, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.939876] env[62508]: DEBUG oslo_concurrency.lockutils [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "refresh_cache-a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1733.084027] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "refresh_cache-954e23bc-3355-4ab6-ad81-ea7bc55b6ee7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1733.084027] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquired lock "refresh_cache-954e23bc-3355-4ab6-ad81-ea7bc55b6ee7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1733.084027] env[62508]: DEBUG nova.network.neutron [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1733.136292] env[62508]: DEBUG nova.network.neutron [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Updating instance_info_cache with network_info: [{"id": "a51ee93a-fba9-4802-9791-4c16f273346e", "address": "fa:16:3e:5a:cd:a0", "network": {"id": "7fdcf35b-d562-4926-a8b1-15143df837c1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-791265259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86a46b5a43dd41e48816a8d86e3685b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa51ee93a-fb", "ovs_interfaceid": "a51ee93a-fba9-4802-9791-4c16f273346e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1733.172577] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "bee2cc61-b26c-4d2d-a2aa-ec79b8678e32" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.172828] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "bee2cc61-b26c-4d2d-a2aa-ec79b8678e32" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1733.173066] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "bee2cc61-b26c-4d2d-a2aa-ec79b8678e32-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.173288] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "bee2cc61-b26c-4d2d-a2aa-ec79b8678e32-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1733.173465] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "bee2cc61-b26c-4d2d-a2aa-ec79b8678e32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1733.176852] env[62508]: INFO nova.compute.manager [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Terminating instance [ 1733.178849] env[62508]: DEBUG nova.compute.manager [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1733.179119] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1733.180395] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb1b058-1ef5-4a0b-a4ef-31d035825fd2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.188735] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1733.189668] env[62508]: DEBUG nova.network.neutron [req-ac616fe8-9a7a-4f2d-a422-9746aa48626b req-d6c1bc74-8dfd-48e7-bd86-2ff97ce74758 service nova] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Updated VIF entry in instance network info cache for port 4f1783a7-f095-41fa-9b4f-cf3505149a6e. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1733.189994] env[62508]: DEBUG nova.network.neutron [req-ac616fe8-9a7a-4f2d-a422-9746aa48626b req-d6c1bc74-8dfd-48e7-bd86-2ff97ce74758 service nova] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Updating instance_info_cache with network_info: [{"id": "4f1783a7-f095-41fa-9b4f-cf3505149a6e", "address": "fa:16:3e:e1:21:09", "network": {"id": "c194e816-3ebb-4cf6-852c-09521019b177", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1154243421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecc6c09064734ca381a22d894304cd80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f1783a7-f0", "ovs_interfaceid": "4f1783a7-f095-41fa-9b4f-cf3505149a6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1733.191183] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-12b2945d-b0e9-4520-91a1-fa2440e098d7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.198515] env[62508]: DEBUG oslo_vmware.api [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1733.198515] env[62508]: value = "task-1776477" [ 1733.198515] env[62508]: _type = "Task" [ 
1733.198515] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.207377] env[62508]: DEBUG oslo_vmware.api [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776477, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.234767] env[62508]: DEBUG oslo_vmware.api [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] Task: {'id': task-1776471, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.237283] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: cdb1ccaf-83b3-48f8-92da-aca2310863ac] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1733.356016] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d6949a-8fc1-4707-a775-c130aef5648c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.378360] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c4ecab-0f39-49e2-801c-789b3be0b343 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.399261] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating instance '95a289ac-3178-45ea-80d2-905b9af54f3c' progress to 67 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1733.408911] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776476, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.446673] env[62508]: DEBUG oslo_concurrency.lockutils [None req-08137d55-0d1f-4ab9-a6df-cc122846e11b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "interface-a058273e-9c68-4d73-9149-ceb60c1c1cda-09990164-6bb3-4ed4-ba3a-f67204a82380" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.860s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1733.511590] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2029a323-7585-436a-a58e-8a304617f8d5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.519590] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c400d970-60ec-493e-a65f-13a4f6f5fca6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.553954] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c0ee98d-b9d6-48f5-a19e-d30681887392 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.562369] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5563104f-43b9-4f94-963f-e8ac440e26d9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.576120] env[62508]: DEBUG nova.compute.provider_tree [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1733.627464] env[62508]: DEBUG nova.network.neutron [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1733.639409] env[62508]: DEBUG oslo_concurrency.lockutils [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Releasing lock "refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1733.695266] env[62508]: DEBUG oslo_concurrency.lockutils [req-ac616fe8-9a7a-4f2d-a422-9746aa48626b req-d6c1bc74-8dfd-48e7-bd86-2ff97ce74758 service nova] Releasing lock "refresh_cache-b4427ba0-4dcf-4b21-a584-a7fee560f135" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1733.709680] env[62508]: DEBUG oslo_vmware.api [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776477, 'name': PowerOffVM_Task, 'duration_secs': 0.206979} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.712648] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1733.712761] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1733.713165] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-74792005-603e-4157-bea3-ad321abaa2a2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.729293] env[62508]: DEBUG oslo_vmware.api [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] Task: {'id': task-1776471, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.740041] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: fd658703-d477-4d21-b0ad-7ff08d4c2f97] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1733.807497] env[62508]: DEBUG nova.network.neutron [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Updating instance_info_cache with network_info: [{"id": "153213b8-b5cb-4074-8748-81bb5e028c02", "address": "fa:16:3e:db:47:5e", "network": {"id": "c194e816-3ebb-4cf6-852c-09521019b177", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1154243421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecc6c09064734ca381a22d894304cd80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap153213b8-b5", "ovs_interfaceid": "153213b8-b5cb-4074-8748-81bb5e028c02", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1733.889379] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Unregistered the VM {{(pid=62508) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1733.889743] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1733.890067] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Deleting the datastore file [datastore1] bee2cc61-b26c-4d2d-a2aa-ec79b8678e32 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1733.890924] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-32ca7abe-3bdf-4030-a267-9bb95a89bb7c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.899949] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776476, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.904308] env[62508]: DEBUG oslo_vmware.api [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for the task: (returnval){ [ 1733.904308] env[62508]: value = "task-1776479" [ 1733.904308] env[62508]: _type = "Task" [ 1733.904308] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.916119] env[62508]: DEBUG oslo_vmware.api [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776479, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.977491] env[62508]: DEBUG nova.network.neutron [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Port d9e88907-9194-4d90-87ab-d1b87ef0e48e binding to destination host cpu-1 is already ACTIVE {{(pid=62508) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1734.048204] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1734.049183] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58420d0-e3e9-4034-9eac-5a66de55ee8c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.058023] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1734.058235] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e6889ad0-013f-45ac-9e84-dfd085d4f521 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.076089] env[62508]: DEBUG nova.compute.manager [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Received event network-vif-plugged-153213b8-b5cb-4074-8748-81bb5e028c02 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1734.076366] env[62508]: DEBUG oslo_concurrency.lockutils [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] Acquiring lock "954e23bc-3355-4ab6-ad81-ea7bc55b6ee7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.076535] env[62508]: DEBUG oslo_concurrency.lockutils [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] Lock "954e23bc-3355-4ab6-ad81-ea7bc55b6ee7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.076699] env[62508]: DEBUG oslo_concurrency.lockutils [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] Lock "954e23bc-3355-4ab6-ad81-ea7bc55b6ee7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.076862] env[62508]: DEBUG nova.compute.manager [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] [instance: 
954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] No waiting events found dispatching network-vif-plugged-153213b8-b5cb-4074-8748-81bb5e028c02 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1734.077034] env[62508]: WARNING nova.compute.manager [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Received unexpected event network-vif-plugged-153213b8-b5cb-4074-8748-81bb5e028c02 for instance with vm_state building and task_state spawning. [ 1734.077199] env[62508]: DEBUG nova.compute.manager [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Received event network-changed-153213b8-b5cb-4074-8748-81bb5e028c02 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1734.077352] env[62508]: DEBUG nova.compute.manager [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Refreshing instance network info cache due to event network-changed-153213b8-b5cb-4074-8748-81bb5e028c02. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1734.077513] env[62508]: DEBUG oslo_concurrency.lockutils [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] Acquiring lock "refresh_cache-954e23bc-3355-4ab6-ad81-ea7bc55b6ee7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1734.079720] env[62508]: DEBUG nova.scheduler.client.report [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1734.233650] env[62508]: DEBUG oslo_vmware.api [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] Task: {'id': task-1776471, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.238379] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1734.238672] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1734.238773] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Deleting the datastore file [datastore1] a8ce13c4-ea95-4343-8eab-8a0dafbf0e03 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1734.239050] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a878cef6-31eb-405c-8818-176688499c8f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.245379] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1734.245379] env[62508]: value = "task-1776481" [ 1734.245379] env[62508]: _type = "Task" [ 1734.245379] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.250071] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: a0245a18-638d-4c32-bea2-456408b5e001] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1734.257982] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776481, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.310447] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Releasing lock "refresh_cache-954e23bc-3355-4ab6-ad81-ea7bc55b6ee7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.310964] env[62508]: DEBUG nova.compute.manager [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Instance network_info: |[{"id": "153213b8-b5cb-4074-8748-81bb5e028c02", "address": "fa:16:3e:db:47:5e", "network": {"id": "c194e816-3ebb-4cf6-852c-09521019b177", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1154243421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecc6c09064734ca381a22d894304cd80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap153213b8-b5", "ovs_interfaceid": "153213b8-b5cb-4074-8748-81bb5e028c02", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1734.311382] env[62508]: DEBUG oslo_concurrency.lockutils [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] Acquired lock "refresh_cache-954e23bc-3355-4ab6-ad81-ea7bc55b6ee7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1734.311664] env[62508]: DEBUG nova.network.neutron [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Refreshing network info cache for port 153213b8-b5cb-4074-8748-81bb5e028c02 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1734.313478] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:47:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'be8bd197-4b2b-46e7-88ea-2554b0438584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '153213b8-b5cb-4074-8748-81bb5e028c02', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1734.324624] env[62508]: DEBUG oslo.service.loopingcall [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 
tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1734.328405] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1734.328959] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c382647-c7ed-4e59-98f0-eef1ae03c331 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.356249] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1734.356249] env[62508]: value = "task-1776482" [ 1734.356249] env[62508]: _type = "Task" [ 1734.356249] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.366230] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776482, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.392015] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776476, 'name': CreateVM_Task, 'duration_secs': 1.040385} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.392203] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1734.392928] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1734.393108] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1734.393431] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1734.393808] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c626750-8325-4de3-ace8-344eb4b59534 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.398383] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1734.398383] env[62508]: value = 
"session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524e86ba-0d8e-baf3-bfc1-48c6002847a0" [ 1734.398383] env[62508]: _type = "Task" [ 1734.398383] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.412357] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524e86ba-0d8e-baf3-bfc1-48c6002847a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.417907] env[62508]: DEBUG oslo_vmware.api [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Task: {'id': task-1776479, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.368118} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.418836] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1734.418836] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1734.418836] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1734.418836] env[62508]: INFO nova.compute.manager [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1734.419075] env[62508]: DEBUG oslo.service.loopingcall [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1734.420530] env[62508]: DEBUG nova.compute.manager [-] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1734.420530] env[62508]: DEBUG nova.network.neutron [-] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1734.456189] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Volume attach. Driver type: vmdk {{(pid=62508) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1734.456412] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368791', 'volume_id': '7d9b661a-9d4e-4f18-b54b-a757d6cdd88c', 'name': 'volume-7d9b661a-9d4e-4f18-b54b-a757d6cdd88c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f3e1c48d-9aaf-415f-8234-82a71bb469ee', 'attached_at': '', 'detached_at': '', 'volume_id': '7d9b661a-9d4e-4f18-b54b-a757d6cdd88c', 'serial': '7d9b661a-9d4e-4f18-b54b-a757d6cdd88c'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1734.457410] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bec70c7-8bd6-469b-8c0f-a7b3eb33e2c3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.477365] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb679ad3-928e-4fc0-addc-dd167b706a62 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.508008] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] volume-7d9b661a-9d4e-4f18-b54b-a757d6cdd88c/volume-7d9b661a-9d4e-4f18-b54b-a757d6cdd88c.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1734.511174] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6662719e-87f7-49e7-8fde-ffe185131841 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.533045] env[62508]: DEBUG oslo_vmware.api [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1734.533045] env[62508]: value = "task-1776483" [ 1734.533045] env[62508]: _type = "Task" [ 1734.533045] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.543012] env[62508]: DEBUG oslo_vmware.api [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776483, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.585633] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.350s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.586231] env[62508]: DEBUG nova.compute.manager [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1734.589766] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.682s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.590047] env[62508]: DEBUG nova.objects.instance [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lazy-loading 'resources' on Instance uuid 145306d7-f0e8-46c0-b2ab-1c41c208f976 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1734.645960] env[62508]: DEBUG nova.network.neutron [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Updated VIF entry in instance network info cache for port 153213b8-b5cb-4074-8748-81bb5e028c02. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1734.646441] env[62508]: DEBUG nova.network.neutron [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Updating instance_info_cache with network_info: [{"id": "153213b8-b5cb-4074-8748-81bb5e028c02", "address": "fa:16:3e:db:47:5e", "network": {"id": "c194e816-3ebb-4cf6-852c-09521019b177", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1154243421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecc6c09064734ca381a22d894304cd80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap153213b8-b5", "ovs_interfaceid": "153213b8-b5cb-4074-8748-81bb5e028c02", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1734.732777] env[62508]: DEBUG nova.compute.manager [req-1b010806-3f09-4027-9274-29e94d1e4eb2 req-2c46ba9a-e6e1-4335-a482-5411f89915a9 service nova] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Received event network-vif-deleted-8570ede3-d3fc-41d9-90a0-3dc1ef777446 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1734.733142] env[62508]: INFO nova.compute.manager [req-1b010806-3f09-4027-9274-29e94d1e4eb2 req-2c46ba9a-e6e1-4335-a482-5411f89915a9 service nova] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Neutron deleted interface 8570ede3-d3fc-41d9-90a0-3dc1ef777446; detaching it from the instance and deleting it from the info cache [ 1734.733421] env[62508]: DEBUG nova.network.neutron [req-1b010806-3f09-4027-9274-29e94d1e4eb2 req-2c46ba9a-e6e1-4335-a482-5411f89915a9 service nova] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1734.738563] env[62508]: DEBUG oslo_vmware.api [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] Task: {'id': task-1776471, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.755183] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 6ae078f6-3b96-4b49-b282-cae74d742c97] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1734.758040] env[62508]: DEBUG oslo_vmware.api [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776481, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157136} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.758345] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1734.758549] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1734.758760] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1734.794302] env[62508]: INFO nova.scheduler.client.report [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Deleted allocations for instance a8ce13c4-ea95-4343-8eab-8a0dafbf0e03 [ 1734.866386] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776482, 'name': CreateVM_Task, 'duration_secs': 0.492491} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.866576] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1734.867339] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1734.909698] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524e86ba-0d8e-baf3-bfc1-48c6002847a0, 'name': SearchDatastore_Task, 'duration_secs': 0.022935} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.909981] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.910274] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1734.910497] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1734.910642] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1734.910819] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1734.911207] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1734.911446] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1734.911787] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2e7b4a8-794a-48f4-a341-fecb65fefada {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.914411] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db186142-c991-47a2-b760-4a667d6d726c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.919236] env[62508]: DEBUG oslo_vmware.api 
[None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1734.919236] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524b4121-bb37-948f-b47b-32c0ea272e67" [ 1734.919236] env[62508]: _type = "Task" [ 1734.919236] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.923770] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1734.923979] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1734.925041] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52c89072-36f7-47a3-acdf-4ce4148f2d88 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.930072] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524b4121-bb37-948f-b47b-32c0ea272e67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.932838] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1734.932838] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52acbda0-3e86-4170-327b-e3ab6cea5281" [ 1734.932838] env[62508]: _type = "Task" [ 1734.932838] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.940520] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52acbda0-3e86-4170-327b-e3ab6cea5281, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.024219] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "95a289ac-3178-45ea-80d2-905b9af54f3c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.024444] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "95a289ac-3178-45ea-80d2-905b9af54f3c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.024627] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "95a289ac-3178-45ea-80d2-905b9af54f3c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.044488] env[62508]: DEBUG oslo_vmware.api [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776483, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.093515] env[62508]: DEBUG nova.compute.utils [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1735.098081] env[62508]: DEBUG nova.compute.manager [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1735.098314] env[62508]: DEBUG nova.network.neutron [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1735.149139] env[62508]: DEBUG oslo_concurrency.lockutils [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] Releasing lock "refresh_cache-954e23bc-3355-4ab6-ad81-ea7bc55b6ee7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1735.149576] env[62508]: DEBUG nova.compute.manager [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Received event network-vif-unplugged-a51ee93a-fba9-4802-9791-4c16f273346e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1735.150712] env[62508]: DEBUG oslo_concurrency.lockutils [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] Acquiring lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.150712] env[62508]: DEBUG oslo_concurrency.lockutils [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] Lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.150712] env[62508]: DEBUG oslo_concurrency.lockutils [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] Lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.150712] env[62508]: DEBUG nova.compute.manager [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] No waiting events found dispatching network-vif-unplugged-a51ee93a-fba9-4802-9791-4c16f273346e {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1735.150712] env[62508]: WARNING nova.compute.manager [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Received unexpected event network-vif-unplugged-a51ee93a-fba9-4802-9791-4c16f273346e for instance with vm_state shelved and task_state shelving_offloading. 
[ 1735.150712] env[62508]: DEBUG nova.compute.manager [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Received event network-changed-a51ee93a-fba9-4802-9791-4c16f273346e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1735.150935] env[62508]: DEBUG nova.compute.manager [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Refreshing instance network info cache due to event network-changed-a51ee93a-fba9-4802-9791-4c16f273346e. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1735.151049] env[62508]: DEBUG oslo_concurrency.lockutils [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] Acquiring lock "refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1735.151212] env[62508]: DEBUG oslo_concurrency.lockutils [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] Acquired lock "refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1735.151377] env[62508]: DEBUG nova.network.neutron [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Refreshing network info cache for port a51ee93a-fba9-4802-9791-4c16f273346e {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1735.159027] env[62508]: DEBUG nova.policy [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '712ef76e285f48e6b5e8f75aa2fee850', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce113e91e2b74136a8050ed3acf3557c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1735.206418] env[62508]: DEBUG nova.network.neutron [-] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1735.236591] env[62508]: DEBUG oslo_vmware.api [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] Task: {'id': task-1776471, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.243024] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-60bac861-b26e-4fca-87ad-fb10dea14fc3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.250539] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf03a40-83ea-499c-9856-00516f78edc9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.263791] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: f456dd83-6350-46b2-b06c-41dc5c477358] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1735.286829] env[62508]: DEBUG nova.compute.manager [req-1b010806-3f09-4027-9274-29e94d1e4eb2 req-2c46ba9a-e6e1-4335-a482-5411f89915a9 service nova] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Detach interface failed, port_id=8570ede3-d3fc-41d9-90a0-3dc1ef777446, reason: Instance bee2cc61-b26c-4d2d-a2aa-ec79b8678e32 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1735.295806] env[62508]: DEBUG oslo_concurrency.lockutils [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.369832] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a48e13b-4adc-4303-bfa2-987eafa8b887 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.379520] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d0ab7f-d049-4fa9-8305-dcf26ac2d98f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.412141] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051fb6ee-0d65-4e86-b1b6-a679034f0de7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.420357] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be9bb716-159f-4552-a16c-8a60d1dc63d8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.445608] env[62508]: DEBUG nova.compute.provider_tree [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1735.446578] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524b4121-bb37-948f-b47b-32c0ea272e67, 'name': SearchDatastore_Task, 'duration_secs': 0.009962} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.449868] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1735.450120] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1735.450842] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1735.456802] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52acbda0-3e86-4170-327b-e3ab6cea5281, 'name': SearchDatastore_Task, 'duration_secs': 0.009008} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.457835] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a783a43e-4310-47d3-a9a6-c9b122c3598e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.463683] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1735.463683] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c0993c-1bd4-3561-714e-653dfd0a071d" [ 1735.463683] env[62508]: _type = "Task" [ 1735.463683] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.471134] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c0993c-1bd4-3561-714e-653dfd0a071d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.544735] env[62508]: DEBUG oslo_vmware.api [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776483, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.546745] env[62508]: DEBUG nova.network.neutron [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Successfully created port: 87ce4777-2520-4432-a1ed-03e189684761 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1735.599345] env[62508]: DEBUG nova.compute.manager [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1735.710667] env[62508]: INFO nova.compute.manager [-] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Took 1.29 seconds to deallocate network for instance. [ 1735.734149] env[62508]: DEBUG oslo_vmware.api [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] Task: {'id': task-1776471, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.766778] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 38289797-ecf5-4207-a164-d70228e4411d] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1735.969188] env[62508]: ERROR nova.scheduler.client.report [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [req-f96f1b2a-fa8a-4ac3-8f07-2dabaad0b2c4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f96f1b2a-fa8a-4ac3-8f07-2dabaad0b2c4"}]} [ 1735.977890] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c0993c-1bd4-3561-714e-653dfd0a071d, 'name': SearchDatastore_Task, 'duration_secs': 0.009489} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.978175] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1735.978467] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] b4427ba0-4dcf-4b21-a584-a7fee560f135/b4427ba0-4dcf-4b21-a584-a7fee560f135.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1735.978753] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1735.978937] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1735.979167] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-067cd6d4-8631-441c-a39a-2137cfc78de0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.981317] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2bdd1d21-b164-4eef-9a23-e419bb58efa4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.989019] env[62508]: DEBUG nova.scheduler.client.report [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1735.989799] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1735.989799] env[62508]: value = "task-1776484" [ 1735.989799] env[62508]: _type = "Task" [ 1735.989799] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.991235] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1735.991520] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1735.995017] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f9e3ec0-2d38-4238-bf8c-395b962f7003 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.000757] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1736.000757] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5260f8e0-0cf4-2c3b-e05e-d4ca8ef2d6a7" [ 1736.000757] env[62508]: _type = "Task" [ 1736.000757] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.004767] env[62508]: DEBUG nova.scheduler.client.report [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1736.005181] env[62508]: DEBUG nova.compute.provider_tree [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1736.007329] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776484, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.015858] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5260f8e0-0cf4-2c3b-e05e-d4ca8ef2d6a7, 'name': SearchDatastore_Task, 'duration_secs': 0.009454} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.016964] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c9b41ed-f7c8-473e-87d4-f5276348499d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.020392] env[62508]: DEBUG nova.scheduler.client.report [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1736.029018] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1736.029018] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525d575f-c6d6-c89e-7e1a-f4712c69dd47" [ 1736.029018] env[62508]: _type = "Task" [ 1736.029018] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.036969] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525d575f-c6d6-c89e-7e1a-f4712c69dd47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.041572] env[62508]: DEBUG nova.scheduler.client.report [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1736.048279] env[62508]: DEBUG oslo_vmware.api [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776483, 'name': ReconfigVM_Task, 'duration_secs': 1.513396} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.048555] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Reconfigured VM instance instance-00000058 to attach disk [datastore1] volume-7d9b661a-9d4e-4f18-b54b-a757d6cdd88c/volume-7d9b661a-9d4e-4f18-b54b-a757d6cdd88c.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1736.055179] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca77aa74-d25e-4323-9ced-e4cba1f700f2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.070990] env[62508]: DEBUG oslo_vmware.api [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1736.070990] env[62508]: value = "task-1776485" [ 1736.070990] env[62508]: _type = "Task" [ 1736.070990] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.082546] env[62508]: DEBUG oslo_vmware.api [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776485, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.183865] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.184075] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.184260] env[62508]: DEBUG nova.network.neutron [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1736.219235] env[62508]: DEBUG nova.network.neutron [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Updated VIF entry in instance network info cache for port a51ee93a-fba9-4802-9791-4c16f273346e. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1736.219758] env[62508]: DEBUG nova.network.neutron [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Updating instance_info_cache with network_info: [{"id": "a51ee93a-fba9-4802-9791-4c16f273346e", "address": "fa:16:3e:5a:cd:a0", "network": {"id": "7fdcf35b-d562-4926-a8b1-15143df837c1", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-791265259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86a46b5a43dd41e48816a8d86e3685b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapa51ee93a-fb", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1736.222239] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.235564] env[62508]: DEBUG oslo_vmware.api [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] Task: {'id': task-1776471, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.269840] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: e156aef5-bb56-4c17-9e7e-9419b672c9cf] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1736.323988] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4733ee9-68a1-4f43-8f35-0bc7d60f1470 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.334041] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a501307e-a622-4706-87e0-f349a425b1b7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.374044] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13bc4dc3-867d-45ea-aae2-65e5b8f4d379 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.386313] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "b83dd148-8cf6-474b-bb19-e0822732b12a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.386574] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "b83dd148-8cf6-474b-bb19-e0822732b12a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.395290] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feeb5e6f-aac6-443d-9b5f-503551ba95f5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.411196] env[62508]: DEBUG nova.compute.provider_tree [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1736.500706] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776484, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.537269] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525d575f-c6d6-c89e-7e1a-f4712c69dd47, 'name': SearchDatastore_Task, 'duration_secs': 0.008768} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.537558] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1736.537826] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7/954e23bc-3355-4ab6-ad81-ea7bc55b6ee7.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1736.538104] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8343a0a6-4c48-477e-ae4d-2a2d584c93e7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.545474] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1736.545474] env[62508]: value = "task-1776486" [ 1736.545474] env[62508]: _type = "Task" [ 1736.545474] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.553919] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776486, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.582040] env[62508]: DEBUG oslo_vmware.api [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776485, 'name': ReconfigVM_Task, 'duration_secs': 0.136718} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.585018] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368791', 'volume_id': '7d9b661a-9d4e-4f18-b54b-a757d6cdd88c', 'name': 'volume-7d9b661a-9d4e-4f18-b54b-a757d6cdd88c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f3e1c48d-9aaf-415f-8234-82a71bb469ee', 'attached_at': '', 'detached_at': '', 'volume_id': '7d9b661a-9d4e-4f18-b54b-a757d6cdd88c', 'serial': '7d9b661a-9d4e-4f18-b54b-a757d6cdd88c'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1736.610037] env[62508]: DEBUG nova.compute.manager [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1736.635348] env[62508]: DEBUG nova.virt.hardware [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1736.635632] env[62508]: DEBUG nova.virt.hardware [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1736.635794] env[62508]: DEBUG nova.virt.hardware [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1736.635982] env[62508]: DEBUG nova.virt.hardware [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1736.636184] env[62508]: DEBUG nova.virt.hardware [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Image pref 0:0:0 
{{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1736.636366] env[62508]: DEBUG nova.virt.hardware [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1736.636583] env[62508]: DEBUG nova.virt.hardware [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1736.636744] env[62508]: DEBUG nova.virt.hardware [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1736.636913] env[62508]: DEBUG nova.virt.hardware [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1736.637126] env[62508]: DEBUG nova.virt.hardware [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1736.637270] env[62508]: DEBUG nova.virt.hardware [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1736.638255] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b85ea412-468b-4abf-bc13-0a2610b762e3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.651296] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60485bd0-b216-4a61-9744-d093e911b79d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.724670] env[62508]: DEBUG oslo_concurrency.lockutils [req-e6f390d7-0c9c-4f37-9c6d-39806196c7c3 req-e812cf16-9450-4cc8-bf75-d8f94f3e0714 service nova] Releasing lock "refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1736.740968] env[62508]: DEBUG oslo_vmware.api [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] Task: {'id': task-1776471, 'name': ReconfigVM_Task, 'duration_secs': 5.816553} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.741465] env[62508]: DEBUG oslo_concurrency.lockutils [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] Releasing lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1736.741465] env[62508]: DEBUG nova.virt.vmwareapi.vmops [req-6fd89eba-7917-4e77-8483-be36f111c048 req-2219ca5e-472e-43b3-8ee2-8d5627dd67f6 service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Reconfigured VM to detach interface {{(pid=62508) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1736.742212] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 6.164s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.742212] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "a058273e-9c68-4d73-9149-ceb60c1c1cda-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.742420] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "a058273e-9c68-4d73-9149-ceb60c1c1cda-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.742570] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "a058273e-9c68-4d73-9149-ceb60c1c1cda-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.744658] env[62508]: INFO nova.compute.manager [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Terminating instance [ 1736.747971] env[62508]: DEBUG nova.compute.manager [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1736.748617] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1736.749481] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee70cf2-868e-4703-9c63-51d67d1ca0bd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.758817] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1736.759196] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1e1da15-293e-4004-bb8a-0318dcca7195 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.765639] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.765851] env[62508]: DEBUG oslo_vmware.api [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1736.765851] env[62508]: value = "task-1776487" [ 1736.765851] env[62508]: _type = "Task" [ 1736.765851] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.775490] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: f307d4d5-e877-4d0a-951c-779c1d2e573b] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1736.777640] env[62508]: DEBUG oslo_vmware.api [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776487, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.900079] env[62508]: DEBUG nova.compute.manager [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1736.935270] env[62508]: ERROR nova.scheduler.client.report [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [req-6c849925-8c08-4856-971c-11a58152665c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6c849925-8c08-4856-971c-11a58152665c"}]} [ 1736.944576] env[62508]: DEBUG nova.network.neutron [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating instance_info_cache with network_info: [{"id": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "address": "fa:16:3e:f8:bf:1b", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9e88907-91", "ovs_interfaceid": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1736.955043] env[62508]: DEBUG nova.scheduler.client.report [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1736.977849] env[62508]: DEBUG nova.scheduler.client.report [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1736.978188] env[62508]: DEBUG nova.compute.provider_tree [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1736.992334] env[62508]: DEBUG nova.scheduler.client.report [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1737.005461] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776484, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.792175} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.006356] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] b4427ba0-4dcf-4b21-a584-a7fee560f135/b4427ba0-4dcf-4b21-a584-a7fee560f135.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1737.006356] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1737.006356] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-27553d5a-446b-4b5c-af08-8ed810ae625a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.012844] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1737.012844] env[62508]: value = "task-1776488" [ 1737.012844] env[62508]: _type = "Task" [ 1737.012844] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.020200] env[62508]: DEBUG nova.scheduler.client.report [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1737.029670] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776488, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.058453] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776486, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.063569] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "5da47620-3979-44e8-91c5-154a1fe4ee48" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.063928] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "5da47620-3979-44e8-91c5-154a1fe4ee48" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.166362] env[62508]: DEBUG nova.compute.manager [req-05042d77-3098-49df-b881-fd1f94600ec4 req-b39237b8-c359-4990-befd-abd3d1cb15a6 service nova] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Received event network-vif-plugged-87ce4777-2520-4432-a1ed-03e189684761 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1737.166362] env[62508]: DEBUG oslo_concurrency.lockutils [req-05042d77-3098-49df-b881-fd1f94600ec4 req-b39237b8-c359-4990-befd-abd3d1cb15a6 service nova] Acquiring lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.166362] env[62508]: DEBUG oslo_concurrency.lockutils [req-05042d77-3098-49df-b881-fd1f94600ec4 req-b39237b8-c359-4990-befd-abd3d1cb15a6 service nova] Lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.167472] env[62508]: DEBUG oslo_concurrency.lockutils 
[req-05042d77-3098-49df-b881-fd1f94600ec4 req-b39237b8-c359-4990-befd-abd3d1cb15a6 service nova] Lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1737.167730] env[62508]: DEBUG nova.compute.manager [req-05042d77-3098-49df-b881-fd1f94600ec4 req-b39237b8-c359-4990-befd-abd3d1cb15a6 service nova] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] No waiting events found dispatching network-vif-plugged-87ce4777-2520-4432-a1ed-03e189684761 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1737.167906] env[62508]: WARNING nova.compute.manager [req-05042d77-3098-49df-b881-fd1f94600ec4 req-b39237b8-c359-4990-befd-abd3d1cb15a6 service nova] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Received unexpected event network-vif-plugged-87ce4777-2520-4432-a1ed-03e189684761 for instance with vm_state building and task_state spawning. [ 1737.282461] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 6afa4e73-64b4-4b10-b598-433f0c22ecb3] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1737.286396] env[62508]: DEBUG oslo_vmware.api [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776487, 'name': PowerOffVM_Task, 'duration_secs': 0.274877} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.286396] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1737.286396] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1737.286396] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a794ce5-474d-45d0-b73d-cb6d1211111d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.332165] env[62508]: DEBUG nova.network.neutron [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Successfully updated port: 87ce4777-2520-4432-a1ed-03e189684761 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1737.344024] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00176e3a-d25d-4eda-b93c-48d4096a4243 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.353539] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f4d5e8a4-c466-43ac-b062-5ae0387fb5e3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.388203] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c089a775-de9d-44be-9c2d-b569bc26f521 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.393928] env[62508]: DEBUG oslo_concurrency.lockutils [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.394167] env[62508]: DEBUG oslo_concurrency.lockutils [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.400902] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef210100-9334-4316-a9ff-7bd8f87911ec {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.419999] env[62508]: DEBUG nova.compute.provider_tree [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1737.422937] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.448343] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1737.522245] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776488, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087551} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.522524] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1737.523344] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12ae200-b371-448e-92d5-2b6798fbc3cc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.545969] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] b4427ba0-4dcf-4b21-a584-a7fee560f135/b4427ba0-4dcf-4b21-a584-a7fee560f135.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1737.546683] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49f23290-a8e3-4cd9-9438-4027922599e6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.568903] env[62508]: DEBUG nova.compute.manager [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1737.582136] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776486, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.583636] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1737.583636] env[62508]: value = "task-1776490" [ 1737.583636] env[62508]: _type = "Task" [ 1737.583636] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.591117] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776490, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.634628] env[62508]: DEBUG nova.objects.instance [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lazy-loading 'flavor' on Instance uuid f3e1c48d-9aaf-415f-8234-82a71bb469ee {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1737.792000] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: e07ab22e-bd07-4232-abfe-c0617c0b9813] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1737.835178] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1737.835397] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1737.835612] env[62508]: DEBUG nova.network.neutron [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1737.896649] env[62508]: DEBUG nova.compute.manager [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1737.923290] env[62508]: DEBUG nova.scheduler.client.report [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1737.969375] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed00e887-33c9-472a-b411-a998c7acbc57 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.988408] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6113837f-a14f-4394-a127-66bda160f629 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.995330] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating instance '95a289ac-3178-45ea-80d2-905b9af54f3c' progress to 83 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1738.082537] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776486, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.055281} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.082808] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7/954e23bc-3355-4ab6-ad81-ea7bc55b6ee7.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1738.083441] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1738.083441] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-27aca3c5-5299-4449-b03d-5b377614d5c2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.089173] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.092847] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1738.092847] env[62508]: value = "task-1776491" [ 1738.092847] env[62508]: _type = "Task" [ 1738.092847] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.095568] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776490, 'name': ReconfigVM_Task, 'duration_secs': 0.316075} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.098632] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Reconfigured VM instance instance-00000059 to attach disk [datastore1] b4427ba0-4dcf-4b21-a584-a7fee560f135/b4427ba0-4dcf-4b21-a584-a7fee560f135.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1738.099329] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b9a8bbc4-e58f-420a-a8f2-2343b3de3956 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.105845] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776491, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.107289] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1738.107289] env[62508]: value = "task-1776492" [ 1738.107289] env[62508]: _type = "Task" [ 1738.107289] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.115597] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776492, 'name': Rename_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.139756] env[62508]: DEBUG oslo_concurrency.lockutils [None req-24fd8645-4a9c-49d2-ad53-488370ecc7ee tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.303s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1738.296038] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 30e8b6ca-10fd-4e98-815d-1622f162b05c] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1738.306265] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.306566] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1738.306785] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.306966] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1738.307222] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1738.310809] env[62508]: INFO nova.compute.manager [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Terminating instance [ 1738.313015] env[62508]: DEBUG nova.compute.manager [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 
tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1738.313232] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1738.313507] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3199a574-fb6a-43ee-ae16-9f37a1ccf95f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.320367] env[62508]: DEBUG oslo_vmware.api [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1738.320367] env[62508]: value = "task-1776493" [ 1738.320367] env[62508]: _type = "Task" [ 1738.320367] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.330746] env[62508]: DEBUG oslo_vmware.api [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776493, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.381857] env[62508]: DEBUG nova.network.neutron [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1738.415782] env[62508]: DEBUG oslo_concurrency.lockutils [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.427981] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.838s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1738.430194] env[62508]: DEBUG oslo_concurrency.lockutils [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.134s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1738.430322] env[62508]: DEBUG nova.objects.instance [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lazy-loading 'resources' on Instance uuid a8ce13c4-ea95-4343-8eab-8a0dafbf0e03 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1738.447618] env[62508]: INFO nova.scheduler.client.report [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Deleted allocations for instance 145306d7-f0e8-46c0-b2ab-1c41c208f976 [ 1738.502040] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1738.502557] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-260a709f-0579-44cd-abb5-4073f3298927 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.509567] env[62508]: DEBUG oslo_vmware.api [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1738.509567] env[62508]: value = "task-1776494" [ 1738.509567] env[62508]: _type = "Task" [ 1738.509567] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.518767] env[62508]: DEBUG oslo_vmware.api [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776494, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.520706] env[62508]: DEBUG nova.network.neutron [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updating instance_info_cache with network_info: [{"id": "87ce4777-2520-4432-a1ed-03e189684761", "address": "fa:16:3e:8b:d2:e9", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87ce4777-25", "ovs_interfaceid": "87ce4777-2520-4432-a1ed-03e189684761", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1738.610741] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776491, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071833} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.614107] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1738.614878] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e70809-abb3-441d-acf1-9fb395cf8106 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.622455] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776492, 'name': Rename_Task, 'duration_secs': 0.167767} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.631941] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1738.641084] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7/954e23bc-3355-4ab6-ad81-ea7bc55b6ee7.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1738.641392] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3490db3-30b9-4e11-9103-66f50309b2a2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.643349] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5535f056-fd57-43aa-8e55-0b15f922ffd2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.664038] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1738.664038] env[62508]: value = "task-1776496" [ 1738.664038] env[62508]: _type = "Task" [ 1738.664038] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.665333] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1738.665333] env[62508]: value = "task-1776495" [ 1738.665333] env[62508]: _type = "Task" [ 1738.665333] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.678891] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776495, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.799980] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 2b166aa9-9381-42c0-a607-7d610f08a4e3] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1738.834274] env[62508]: DEBUG oslo_vmware.api [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776493, 'name': PowerOffVM_Task, 'duration_secs': 0.224609} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.834596] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1738.834840] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Volume detach. Driver type: vmdk {{(pid=62508) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1738.835126] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368791', 'volume_id': '7d9b661a-9d4e-4f18-b54b-a757d6cdd88c', 'name': 'volume-7d9b661a-9d4e-4f18-b54b-a757d6cdd88c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f3e1c48d-9aaf-415f-8234-82a71bb469ee', 'attached_at': '', 'detached_at': '', 'volume_id': '7d9b661a-9d4e-4f18-b54b-a757d6cdd88c', 'serial': '7d9b661a-9d4e-4f18-b54b-a757d6cdd88c'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1738.836066] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d157f35-12ce-40a0-a7da-b1925f28d06d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.859621] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093642e0-6346-4cc0-bfe7-138a12a86fd9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.867913] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d6ced0c-c4f6-4451-9a5c-dfd5237f0795 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.890791] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-affaa2bc-cb6f-451b-b726-22791e13fd36 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.908603] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] The volume has not been displaced from its original location: [datastore1] volume-7d9b661a-9d4e-4f18-b54b-a757d6cdd88c/volume-7d9b661a-9d4e-4f18-b54b-a757d6cdd88c.vmdk. No consolidation needed. 
{{(pid=62508) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1738.914037] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Reconfiguring VM instance instance-00000058 to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1738.914519] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7f590fe-da8a-4019-8255-b0c6dde34e3d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.933245] env[62508]: DEBUG nova.objects.instance [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lazy-loading 'numa_topology' on Instance uuid a8ce13c4-ea95-4343-8eab-8a0dafbf0e03 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1738.934341] env[62508]: DEBUG oslo_vmware.api [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1738.934341] env[62508]: value = "task-1776497" [ 1738.934341] env[62508]: _type = "Task" [ 1738.934341] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.942482] env[62508]: DEBUG oslo_vmware.api [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776497, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.957044] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9b6a558d-ab9b-441e-ad18-4db07597f4a4 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "145306d7-f0e8-46c0-b2ab-1c41c208f976" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.223s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1739.022779] env[62508]: DEBUG oslo_vmware.api [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776494, 'name': PowerOnVM_Task, 'duration_secs': 0.392931} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.022779] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1739.022779] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9378012e-f3a9-4841-859b-eaaaa0cba131 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating instance '95a289ac-3178-45ea-80d2-905b9af54f3c' progress to 100 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1739.025214] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1739.025789] env[62508]: DEBUG nova.compute.manager [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Instance network_info: |[{"id": "87ce4777-2520-4432-a1ed-03e189684761", "address": "fa:16:3e:8b:d2:e9", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87ce4777-25", "ovs_interfaceid": "87ce4777-2520-4432-a1ed-03e189684761", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1739.025954] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:d2:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35342bcb-8b06-472e-b3c0-43fd3d6c4b30', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87ce4777-2520-4432-a1ed-03e189684761', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1739.034563] env[62508]: DEBUG oslo.service.loopingcall [None 
req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1739.034828] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1739.035102] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-692de167-1829-4c0d-8d46-c9c6044ade7d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.055262] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1739.055262] env[62508]: value = "task-1776498" [ 1739.055262] env[62508]: _type = "Task" [ 1739.055262] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.066144] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776498, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.184737] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776495, 'name': PowerOnVM_Task, 'duration_secs': 0.513052} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.184986] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776496, 'name': ReconfigVM_Task, 'duration_secs': 0.439555} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.185237] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1739.185434] env[62508]: INFO nova.compute.manager [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Took 9.62 seconds to spawn the instance on the hypervisor. 
[ 1739.185615] env[62508]: DEBUG nova.compute.manager [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1739.185896] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7/954e23bc-3355-4ab6-ad81-ea7bc55b6ee7.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1739.187056] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0fe82b4-bbe2-44b2-bdd9-3dd5d089d567 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.189657] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cdf12d43-f5a2-473e-8731-f08ad7662a93 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.194473] env[62508]: DEBUG nova.compute.manager [req-38575fbc-f968-462d-b6c8-a42baacfa154 req-69b2ff46-067f-4c96-9d46-79897aabc014 service nova] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Received event network-changed-87ce4777-2520-4432-a1ed-03e189684761 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1739.194663] env[62508]: DEBUG nova.compute.manager [req-38575fbc-f968-462d-b6c8-a42baacfa154 req-69b2ff46-067f-4c96-9d46-79897aabc014 service nova] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Refreshing instance network info cache due to event network-changed-87ce4777-2520-4432-a1ed-03e189684761. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1739.194877] env[62508]: DEBUG oslo_concurrency.lockutils [req-38575fbc-f968-462d-b6c8-a42baacfa154 req-69b2ff46-067f-4c96-9d46-79897aabc014 service nova] Acquiring lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1739.195033] env[62508]: DEBUG oslo_concurrency.lockutils [req-38575fbc-f968-462d-b6c8-a42baacfa154 req-69b2ff46-067f-4c96-9d46-79897aabc014 service nova] Acquired lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1739.195183] env[62508]: DEBUG nova.network.neutron [req-38575fbc-f968-462d-b6c8-a42baacfa154 req-69b2ff46-067f-4c96-9d46-79897aabc014 service nova] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Refreshing network info cache for port 87ce4777-2520-4432-a1ed-03e189684761 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1739.207021] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1739.207021] env[62508]: value = "task-1776499" [ 1739.207021] env[62508]: _type = "Task" [ 1739.207021] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.215658] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776499, 'name': Rename_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.303213] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: aedbd388-3ef7-410f-b0e3-5ea67ad56b65] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1739.435366] env[62508]: DEBUG nova.objects.base [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1739.446921] env[62508]: DEBUG oslo_vmware.api [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776497, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.568593] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776498, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.631197] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1739.631197] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1739.631197] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Deleting the datastore file [datastore1] a058273e-9c68-4d73-9149-ceb60c1c1cda {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1739.631366] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5a91afb-deb9-4305-a085-c085c790c0cc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.639651] env[62508]: DEBUG oslo_vmware.api [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1739.639651] env[62508]: value = "task-1776500" [ 1739.639651] env[62508]: _type = "Task" [ 1739.639651] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.661724] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366accad-393e-487f-9c16-1860e64366a1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.669278] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a98692-3278-4467-94cb-a97715c1922a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.703143] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f99c290-7036-4e1b-b313-6cbcf02662c4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.717149] env[62508]: INFO nova.compute.manager [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Took 19.08 seconds to build instance. [ 1739.720227] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a16c6c-833b-4dcf-8170-02d5dc3ed52b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.727568] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776499, 'name': Rename_Task, 'duration_secs': 0.236515} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.728308] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1739.728606] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1dc926dc-2b0c-41bc-8f87-bd3c418bc0fc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.738190] env[62508]: DEBUG nova.compute.provider_tree [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1739.745430] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1739.745430] env[62508]: value = "task-1776501" [ 1739.745430] env[62508]: _type = "Task" [ 1739.745430] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.756623] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776501, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.806828] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 879f1e09-8b21-4f89-bc00-04e3d6710662] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1739.949849] env[62508]: DEBUG oslo_vmware.api [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776497, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.985209] env[62508]: DEBUG nova.network.neutron [req-38575fbc-f968-462d-b6c8-a42baacfa154 req-69b2ff46-067f-4c96-9d46-79897aabc014 service nova] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updated VIF entry in instance network info cache for port 87ce4777-2520-4432-a1ed-03e189684761. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1739.985591] env[62508]: DEBUG nova.network.neutron [req-38575fbc-f968-462d-b6c8-a42baacfa154 req-69b2ff46-067f-4c96-9d46-79897aabc014 service nova] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updating instance_info_cache with network_info: [{"id": "87ce4777-2520-4432-a1ed-03e189684761", "address": "fa:16:3e:8b:d2:e9", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87ce4777-25", "ovs_interfaceid": "87ce4777-2520-4432-a1ed-03e189684761", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1740.066368] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776498, 'name': CreateVM_Task, 'duration_secs': 0.86391} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.066533] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1740.067261] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1740.067426] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1740.067750] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1740.068014] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ba3c23e-e310-4ed7-836c-36974709414d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.072852] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1740.072852] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5201825f-7bd4-94e4-f94d-1d66e6f8aa60" [ 1740.072852] env[62508]: _type = "Task" [ 1740.072852] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.082675] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5201825f-7bd4-94e4-f94d-1d66e6f8aa60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.148963] env[62508]: DEBUG oslo_vmware.api [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776500, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.288922} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.149228] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1740.149434] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1740.149616] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1740.149786] env[62508]: INFO nova.compute.manager [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Took 3.40 seconds to destroy the instance on the hypervisor. [ 1740.150037] env[62508]: DEBUG oslo.service.loopingcall [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1740.150258] env[62508]: DEBUG nova.compute.manager [-] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1740.150357] env[62508]: DEBUG nova.network.neutron [-] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1740.220035] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "b4427ba0-4dcf-4b21-a584-a7fee560f135" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.588s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.243031] env[62508]: DEBUG nova.scheduler.client.report [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1740.255174] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776501, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.310629] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 45de6dd5-97f3-4eea-a171-0254a2b37a41] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1740.451046] env[62508]: DEBUG oslo_vmware.api [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776497, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.489357] env[62508]: DEBUG oslo_concurrency.lockutils [req-38575fbc-f968-462d-b6c8-a42baacfa154 req-69b2ff46-067f-4c96-9d46-79897aabc014 service nova] Releasing lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1740.582455] env[62508]: DEBUG oslo_concurrency.lockutils [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "b74d8374-d5ae-456b-9e9e-ec09459a737b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.582757] env[62508]: DEBUG oslo_concurrency.lockutils [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "b74d8374-d5ae-456b-9e9e-ec09459a737b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.588776] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5201825f-7bd4-94e4-f94d-1d66e6f8aa60, 'name': SearchDatastore_Task, 'duration_secs': 0.018904} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.589281] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1740.589519] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1740.589748] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1740.589895] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1740.590087] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1740.590388] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4cef2cf4-79a0-408c-b977-11fbd48dcf6b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.614356] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1740.614573] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1740.615405] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04efbe8f-9643-4fc2-9437-4dc8666fd337 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.621982] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1740.621982] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52fcb0c1-3d02-e032-3d60-fa367dfb50a2" [ 1740.621982] env[62508]: _type = "Task" [ 1740.621982] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.630050] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52fcb0c1-3d02-e032-3d60-fa367dfb50a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.747117] env[62508]: DEBUG oslo_concurrency.lockutils [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.317s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.749682] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.527s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.751030] env[62508]: DEBUG nova.objects.instance [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lazy-loading 'resources' on Instance uuid bee2cc61-b26c-4d2d-a2aa-ec79b8678e32 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1740.762559] env[62508]: DEBUG oslo_vmware.api [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776501, 'name': PowerOnVM_Task, 'duration_secs': 0.853782} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.762559] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1740.762559] env[62508]: INFO nova.compute.manager [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Took 8.03 seconds to spawn the instance on the hypervisor. [ 1740.762781] env[62508]: DEBUG nova.compute.manager [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1740.763485] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c641cf-8a45-4049-9bc1-b8e49d530dbb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.814925] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 06baedda-2926-4ec8-a4f6-d62713f48a26] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1740.949636] env[62508]: DEBUG oslo_vmware.api [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776497, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.091031] env[62508]: DEBUG nova.compute.manager [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1741.135379] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52fcb0c1-3d02-e032-3d60-fa367dfb50a2, 'name': SearchDatastore_Task, 'duration_secs': 0.030166} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.136516] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cf33bd6-0b5d-4e9e-8efd-0f22ee93a4e0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.142441] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1741.142441] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b71af1-47ad-00c3-2e39-a15244aba60c" [ 1741.142441] env[62508]: _type = "Task" [ 1741.142441] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.151400] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b71af1-47ad-00c3-2e39-a15244aba60c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.161190] env[62508]: DEBUG nova.network.neutron [-] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1741.263037] env[62508]: DEBUG oslo_concurrency.lockutils [None req-500177da-deea-4b32-b956-c85c82f61da4 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 34.634s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.263866] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 4.498s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.264827] env[62508]: INFO nova.compute.manager [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Unshelving [ 1741.291372] env[62508]: INFO nova.compute.manager [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Took 20.62 seconds to build instance. [ 1741.319025] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: e6e5b77d-4fb8-4fad-83f4-c3beb9521ed6] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1741.330323] env[62508]: DEBUG nova.compute.manager [req-525a9e06-8b48-40c5-97b3-e105c45b89fb req-7dfa9552-8235-4136-9775-ca7c590ea57f service nova] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Received event network-vif-deleted-be5b5d9b-1f22-455a-b4f6-128f17030129 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1741.449452] env[62508]: DEBUG oslo_vmware.api [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776497, 'name': ReconfigVM_Task, 'duration_secs': 2.253102} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.451872] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Reconfigured VM instance instance-00000058 to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1741.457594] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2febc318-5762-4b70-8d64-48f6b596aba6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.474125] env[62508]: DEBUG oslo_vmware.api [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1741.474125] env[62508]: value = "task-1776502" [ 1741.474125] env[62508]: _type = "Task" [ 1741.474125] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.482507] env[62508]: DEBUG oslo_vmware.api [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776502, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.558926] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c11184-1b6b-43a5-9dc6-a1edc8036bbc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.566446] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7afc85f3-bda7-4e70-9669-b95bec9fc382 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.599116] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1d6e5a-ba0b-4c11-9ed6-b1260f73889f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.608789] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-490f8e0f-8e52-4cc7-b96e-bf7fe3ce30fa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.613904] env[62508]: DEBUG oslo_concurrency.lockutils [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.614312] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "b4427ba0-4dcf-4b21-a584-a7fee560f135" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1741.614525] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "b4427ba0-4dcf-4b21-a584-a7fee560f135" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.614720] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "b4427ba0-4dcf-4b21-a584-a7fee560f135-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.614894] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "b4427ba0-4dcf-4b21-a584-a7fee560f135-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.615072] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "b4427ba0-4dcf-4b21-a584-a7fee560f135-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.625903] env[62508]: DEBUG nova.compute.provider_tree [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1741.627412] env[62508]: INFO nova.compute.manager [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Terminating instance [ 1741.629277] env[62508]: DEBUG nova.compute.manager [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1741.629536] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1741.630327] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299b2a97-5931-4a45-bd50-b6857ea5855c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.637635] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1741.637864] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4907ad41-8c70-4374-9ffa-f4525f9b7c4b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.648282] env[62508]: DEBUG oslo_vmware.api [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1741.648282] env[62508]: value = "task-1776503" [ 1741.648282] env[62508]: _type = "Task" [ 1741.648282] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.651697] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b71af1-47ad-00c3-2e39-a15244aba60c, 'name': SearchDatastore_Task, 'duration_secs': 0.028554} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.654914] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1741.655238] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 2aeb5a4c-785a-4238-8575-ecd1ff84b97c/2aeb5a4c-785a-4238-8575-ecd1ff84b97c.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1741.655762] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-44fa60be-ddc8-4241-9afa-de51b4f13781 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.663963] env[62508]: INFO nova.compute.manager [-] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Took 1.51 seconds to deallocate network for instance. [ 1741.664268] env[62508]: DEBUG oslo_vmware.api [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776503, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.666822] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1741.666822] env[62508]: value = "task-1776504" [ 1741.666822] env[62508]: _type = "Task" [ 1741.666822] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.677648] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776504, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.690252] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "954e23bc-3355-4ab6-ad81-ea7bc55b6ee7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.793363] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a0407c48-fd9c-4743-a001-cd5c84d70d7a tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "954e23bc-3355-4ab6-ad81-ea7bc55b6ee7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.133s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.793677] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "954e23bc-3355-4ab6-ad81-ea7bc55b6ee7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.104s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.793915] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "954e23bc-3355-4ab6-ad81-ea7bc55b6ee7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.794140] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "954e23bc-3355-4ab6-ad81-ea7bc55b6ee7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.794313] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "954e23bc-3355-4ab6-ad81-ea7bc55b6ee7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.796741] env[62508]: INFO nova.compute.manager [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Terminating instance [ 1741.798998] env[62508]: DEBUG nova.compute.manager [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1741.799222] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1741.800158] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8dc016-74d2-41ca-931b-ed3d5a00d58d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.808766] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1741.809008] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b64caf4-88ac-4f0e-9608-d494c79aedbb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.815648] env[62508]: DEBUG oslo_vmware.api [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1741.815648] env[62508]: value = "task-1776505" [ 1741.815648] env[62508]: _type = "Task" [ 1741.815648] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.826074] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 9a3ef326-0fbf-4fd2-bb5e-3009bf661381] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1741.828049] env[62508]: DEBUG oslo_vmware.api [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776505, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.860414] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9ff30040-38b1-4875-8466-91af1a450f26 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "95a289ac-3178-45ea-80d2-905b9af54f3c" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.860616] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9ff30040-38b1-4875-8466-91af1a450f26 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "95a289ac-3178-45ea-80d2-905b9af54f3c" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.860810] env[62508]: DEBUG nova.compute.manager [None req-9ff30040-38b1-4875-8466-91af1a450f26 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Going to confirm migration 6 {{(pid=62508) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1741.984940] env[62508]: DEBUG oslo_vmware.api [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776502, 'name': ReconfigVM_Task, 'duration_secs': 0.196364} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.985278] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368791', 'volume_id': '7d9b661a-9d4e-4f18-b54b-a757d6cdd88c', 'name': 'volume-7d9b661a-9d4e-4f18-b54b-a757d6cdd88c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f3e1c48d-9aaf-415f-8234-82a71bb469ee', 'attached_at': '', 'detached_at': '', 'volume_id': '7d9b661a-9d4e-4f18-b54b-a757d6cdd88c', 'serial': '7d9b661a-9d4e-4f18-b54b-a757d6cdd88c'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1741.985582] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1741.986557] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-625ebe87-bae0-4d86-a909-ccdfbfdc345f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.994315] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Unregistering the VM {{(pid=62508) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1741.994645] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f52c3604-1ca7-49dc-a8ae-ba3a65df82df {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.151497] env[62508]: ERROR nova.scheduler.client.report [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] [req-a2b7ae8c-6634-4f3d-bb49-ff208a92c957] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a2b7ae8c-6634-4f3d-bb49-ff208a92c957"}]} [ 1742.159604] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1742.159830] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1742.160020] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Deleting the datastore file [datastore1] f3e1c48d-9aaf-415f-8234-82a71bb469ee {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1742.160646] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a27fd23-3b23-4076-84c7-d39ea5e7c360 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.165849] env[62508]: DEBUG oslo_vmware.api [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776503, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.171084] env[62508]: DEBUG nova.scheduler.client.report [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1742.174613] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.174932] env[62508]: DEBUG oslo_vmware.api [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1742.174932] env[62508]: value = "task-1776507" [ 1742.174932] env[62508]: _type = "Task" [ 1742.174932] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.182807] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776504, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496833} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.183624] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 2aeb5a4c-785a-4238-8575-ecd1ff84b97c/2aeb5a4c-785a-4238-8575-ecd1ff84b97c.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1742.183957] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1742.184263] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-be9a4684-98fc-4995-922b-c0b3d098659d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.189141] env[62508]: DEBUG oslo_vmware.api [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776507, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.190079] env[62508]: DEBUG nova.scheduler.client.report [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1742.190288] env[62508]: DEBUG nova.compute.provider_tree [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1742.197943] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1742.197943] env[62508]: value = "task-1776508" [ 1742.197943] env[62508]: _type = "Task" [ 1742.197943] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.207777] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776508, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.208135] env[62508]: DEBUG nova.scheduler.client.report [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1742.227611] env[62508]: DEBUG nova.scheduler.client.report [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1742.273970] env[62508]: DEBUG nova.compute.utils [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1742.331211] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 63fca45d-5922-4a14-9936-30070c349f8e] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1742.333506] env[62508]: DEBUG oslo_vmware.api [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776505, 'name': PowerOffVM_Task, 'duration_secs': 0.502174} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.333850] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1742.334061] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1742.334524] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82d6a354-e7bd-41b7-883d-9ea060e7c3b9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.424593] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9ff30040-38b1-4875-8466-91af1a450f26 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1742.424593] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9ff30040-38b1-4875-8466-91af1a450f26 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1742.424593] env[62508]: DEBUG nova.network.neutron [None req-9ff30040-38b1-4875-8466-91af1a450f26 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1742.424593] env[62508]: DEBUG nova.objects.instance [None req-9ff30040-38b1-4875-8466-91af1a450f26 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lazy-loading 'info_cache' on Instance uuid 95a289ac-3178-45ea-80d2-905b9af54f3c {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1742.441408] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1742.441660] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1742.442056] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Deleting the datastore 
file [datastore1] 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1742.442384] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7221e2c-7d6e-48ff-b825-d5b35300811b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.452121] env[62508]: DEBUG oslo_vmware.api [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1742.452121] env[62508]: value = "task-1776510" [ 1742.452121] env[62508]: _type = "Task" [ 1742.452121] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.464534] env[62508]: DEBUG oslo_vmware.api [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776510, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.493437] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dabf72c-e3d9-4c62-a692-594af76d1708 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.501098] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4187c4-8ac7-4cec-8187-020b2e4707a0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.532314] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8d2d9f-b1c2-49b1-9970-8e6a53332697 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.540243] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e25576f-0359-40be-a5a9-11602a947ecb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.553959] env[62508]: DEBUG nova.compute.provider_tree [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1742.661715] env[62508]: DEBUG oslo_vmware.api [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776503, 'name': PowerOffVM_Task, 'duration_secs': 0.659315} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.661988] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1742.662187] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1742.662438] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-100683c2-ad4c-4529-b3ec-15590cef2be3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.685713] env[62508]: DEBUG oslo_vmware.api [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776507, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.230846} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.685946] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1742.686144] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1742.686331] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1742.686500] env[62508]: INFO nova.compute.manager [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Took 4.37 seconds to destroy the instance on the hypervisor. [ 1742.686735] env[62508]: DEBUG oslo.service.loopingcall [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1742.686918] env[62508]: DEBUG nova.compute.manager [-] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1742.687015] env[62508]: DEBUG nova.network.neutron [-] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1742.696785] env[62508]: DEBUG oslo_concurrency.lockutils [None req-053b352b-7012-48ca-b5f3-3fd28686f364 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.696785] env[62508]: DEBUG oslo_concurrency.lockutils [None req-053b352b-7012-48ca-b5f3-3fd28686f364 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.706134] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776508, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109374} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.706954] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1742.707737] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07008f76-124a-4bdb-8f7f-32acf6211a85 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.730162] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 2aeb5a4c-785a-4238-8575-ecd1ff84b97c/2aeb5a4c-785a-4238-8575-ecd1ff84b97c.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1742.732570] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7f94a12-b670-40fa-902b-697fba50c397 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.752144] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 
1742.752144] env[62508]: value = "task-1776512" [ 1742.752144] env[62508]: _type = "Task" [ 1742.752144] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.760107] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776512, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.777911] env[62508]: INFO nova.virt.block_device [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Booting with volume 6a9e4102-a8e2-41b9-9290-7b0979ea805b at /dev/sdb [ 1742.802960] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1742.802960] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1742.803738] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Deleting the datastore file [datastore1] b4427ba0-4dcf-4b21-a584-a7fee560f135 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1742.803738] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-270d9f57-be78-48f8-a4af-1541080caa69 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.810249] env[62508]: DEBUG oslo_vmware.api [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for the task: (returnval){ [ 1742.810249] env[62508]: value = "task-1776513" [ 1742.810249] env[62508]: _type = "Task" [ 1742.810249] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.818704] env[62508]: DEBUG oslo_vmware.api [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776513, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.819814] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dfd7dc76-b2eb-45fa-8de7-52f0335606a8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.827337] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d8e4ead-f34c-4d2f-ace4-1cee3a31e973 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.837942] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 38d294a9-2f51-438d-b942-a88e380a981f] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1742.860231] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-935a2856-9f3d-43ca-acb9-5ba390889265 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.869481] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da4af48d-a5d7-4761-b0e9-ed1aa79056b4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.900263] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e8ef924-1be1-466d-89d1-17310e1b099d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.906531] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efbd6a7d-65e1-4535-abd0-0fc0094a2a55 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.920606] env[62508]: DEBUG nova.virt.block_device [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Updating existing volume attachment record: 4649df4e-fc5c-44de-a5a1-5963166523ef {{(pid=62508) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1742.962622] env[62508]: DEBUG oslo_vmware.api [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776510, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.35061} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.962907] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1742.963101] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1742.963286] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1742.964030] env[62508]: INFO nova.compute.manager [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1742.964030] env[62508]: DEBUG oslo.service.loopingcall [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1742.964030] env[62508]: DEBUG nova.compute.manager [-] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1742.964030] env[62508]: DEBUG nova.network.neutron [-] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1743.057654] env[62508]: DEBUG nova.scheduler.client.report [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1743.199920] env[62508]: INFO nova.compute.manager [None req-053b352b-7012-48ca-b5f3-3fd28686f364 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Detaching volume 14a689d7-0dd2-4b7e-b497-37fc35863218 [ 1743.203684] env[62508]: DEBUG nova.compute.manager [req-f20f274f-5f08-4c9c-a888-006c51d1eafd req-75a7d0fd-0e73-42be-b023-33e39db140dd service nova] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Received event network-vif-deleted-153213b8-b5cb-4074-8748-81bb5e028c02 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1743.203875] env[62508]: INFO nova.compute.manager [req-f20f274f-5f08-4c9c-a888-006c51d1eafd req-75a7d0fd-0e73-42be-b023-33e39db140dd service nova] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Neutron deleted interface 153213b8-b5cb-4074-8748-81bb5e028c02; detaching it from the instance and deleting it from the info cache [ 1743.204244] env[62508]: DEBUG nova.network.neutron [req-f20f274f-5f08-4c9c-a888-006c51d1eafd req-75a7d0fd-0e73-42be-b023-33e39db140dd service nova] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1743.232202] env[62508]: INFO nova.virt.block_device [None req-053b352b-7012-48ca-b5f3-3fd28686f364 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Attempting to driver detach volume 14a689d7-0dd2-4b7e-b497-37fc35863218 from mountpoint /dev/sdb [ 1743.232516] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-053b352b-7012-48ca-b5f3-3fd28686f364 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Volume detach. 
Driver type: vmdk {{(pid=62508) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1743.232762] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-053b352b-7012-48ca-b5f3-3fd28686f364 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368777', 'volume_id': '14a689d7-0dd2-4b7e-b497-37fc35863218', 'name': 'volume-14a689d7-0dd2-4b7e-b497-37fc35863218', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a', 'attached_at': '', 'detached_at': '', 'volume_id': '14a689d7-0dd2-4b7e-b497-37fc35863218', 'serial': '14a689d7-0dd2-4b7e-b497-37fc35863218'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1743.233643] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a186528b-4cc3-4eaa-9e34-34409a1ee5b3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.259073] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63fb4175-dddb-487b-b2d9-28730c28fbf0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.266445] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776512, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.268955] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8fbf8b2-e364-4752-b672-b1cff1a9114d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.290371] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4b47a2-5057-4438-874d-185909c7f0a1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.307193] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-053b352b-7012-48ca-b5f3-3fd28686f364 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] The volume has not been displaced from its original location: [datastore1] volume-14a689d7-0dd2-4b7e-b497-37fc35863218/volume-14a689d7-0dd2-4b7e-b497-37fc35863218.vmdk. No consolidation needed. 
{{(pid=62508) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1743.312884] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-053b352b-7012-48ca-b5f3-3fd28686f364 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Reconfiguring VM instance instance-00000038 to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1743.312976] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd38cfe8-52bc-4adc-a7c0-86e2eb950575 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.333384] env[62508]: DEBUG oslo_vmware.api [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Task: {'id': task-1776513, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.307576} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.334689] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1743.334804] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1743.335016] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1743.335205] env[62508]: INFO nova.compute.manager [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Took 1.71 seconds to destroy the instance on the hypervisor. [ 1743.335445] env[62508]: DEBUG oslo.service.loopingcall [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1743.335694] env[62508]: DEBUG oslo_vmware.api [None req-053b352b-7012-48ca-b5f3-3fd28686f364 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1743.335694] env[62508]: value = "task-1776517" [ 1743.335694] env[62508]: _type = "Task" [ 1743.335694] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.335876] env[62508]: DEBUG nova.compute.manager [-] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1743.336037] env[62508]: DEBUG nova.network.neutron [-] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1743.341327] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 2b6154bd-a6aa-4afc-b8f4-c9ceec3fd7b5] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1743.350288] env[62508]: DEBUG oslo_vmware.api [None req-053b352b-7012-48ca-b5f3-3fd28686f364 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776517, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.368303] env[62508]: DEBUG nova.compute.manager [req-a3808b1a-71a4-4d98-9835-ea9ad8fc43e2 req-907e5e27-7600-46fb-8fb1-45b71798578f service nova] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Received event network-vif-deleted-e4f1c3ea-4ff3-4929-8984-6e3d3cc11ff8 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1743.368544] env[62508]: INFO nova.compute.manager [req-a3808b1a-71a4-4d98-9835-ea9ad8fc43e2 req-907e5e27-7600-46fb-8fb1-45b71798578f service nova] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Neutron deleted interface e4f1c3ea-4ff3-4929-8984-6e3d3cc11ff8; detaching it from the instance and deleting it from the info cache [ 1743.368749] env[62508]: DEBUG nova.network.neutron [req-a3808b1a-71a4-4d98-9835-ea9ad8fc43e2 req-907e5e27-7600-46fb-8fb1-45b71798578f service nova] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1743.485131] env[62508]: DEBUG nova.network.neutron [-] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1743.564624] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.815s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1743.567141] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.144s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1743.568647] env[62508]: INFO nova.compute.claims [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] 
[instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1743.604102] env[62508]: INFO nova.scheduler.client.report [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Deleted allocations for instance bee2cc61-b26c-4d2d-a2aa-ec79b8678e32 [ 1743.682727] env[62508]: DEBUG nova.network.neutron [-] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1743.706957] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2df8a6c-1473-45dd-90f5-8bbbb49ce207 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.718345] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137b5d05-099f-40d7-a3ea-a0cb339ea9d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.754027] env[62508]: DEBUG nova.compute.manager [req-f20f274f-5f08-4c9c-a888-006c51d1eafd req-75a7d0fd-0e73-42be-b023-33e39db140dd service nova] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Detach interface failed, port_id=153213b8-b5cb-4074-8748-81bb5e028c02, reason: Instance 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1743.763580] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776512, 'name': ReconfigVM_Task, 'duration_secs': 0.995359} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.763870] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 2aeb5a4c-785a-4238-8575-ecd1ff84b97c/2aeb5a4c-785a-4238-8575-ecd1ff84b97c.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1743.765537] env[62508]: DEBUG nova.network.neutron [None req-9ff30040-38b1-4875-8466-91af1a450f26 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating instance_info_cache with network_info: [{"id": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "address": "fa:16:3e:f8:bf:1b", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9e88907-91", "ovs_interfaceid": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1743.766920] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35250b42-1ae2-4129-8849-9ddc3eb4c965 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.773130] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1743.773130] env[62508]: value = "task-1776518" [ 1743.773130] env[62508]: _type = "Task" [ 1743.773130] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.782507] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776518, 'name': Rename_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.851023] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 70c8de27-4696-4005-bbec-e7a33e56311b] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1743.851023] env[62508]: DEBUG oslo_vmware.api [None req-053b352b-7012-48ca-b5f3-3fd28686f364 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776517, 'name': ReconfigVM_Task, 'duration_secs': 0.472112} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.851023] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-053b352b-7012-48ca-b5f3-3fd28686f364 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Reconfigured VM instance instance-00000038 to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1743.855587] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64a09a8e-bd44-45ff-8a86-e47bb1d449c6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.872121] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-764f2103-e759-4ced-a864-cd93c1661119 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.874533] env[62508]: DEBUG oslo_vmware.api [None req-053b352b-7012-48ca-b5f3-3fd28686f364 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1743.874533] env[62508]: value = "task-1776519" [ 1743.874533] env[62508]: _type = "Task" [ 1743.874533] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.883274] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a16afc-9381-4f28-a1c9-05b512357d9b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.899083] env[62508]: DEBUG oslo_vmware.api [None req-053b352b-7012-48ca-b5f3-3fd28686f364 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776519, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.918709] env[62508]: DEBUG nova.compute.manager [req-a3808b1a-71a4-4d98-9835-ea9ad8fc43e2 req-907e5e27-7600-46fb-8fb1-45b71798578f service nova] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Detach interface failed, port_id=e4f1c3ea-4ff3-4929-8984-6e3d3cc11ff8, reason: Instance f3e1c48d-9aaf-415f-8234-82a71bb469ee could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1743.990230] env[62508]: INFO nova.compute.manager [-] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Took 1.30 seconds to deallocate network for instance. 
[ 1744.114726] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9c64847f-30bc-47b0-9683-cde6a0e9885d tempest-ServersAdminTestJSON-1341054529 tempest-ServersAdminTestJSON-1341054529-project-member] Lock "bee2cc61-b26c-4d2d-a2aa-ec79b8678e32" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.942s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.117995] env[62508]: DEBUG nova.network.neutron [-] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1744.188332] env[62508]: INFO nova.compute.manager [-] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Took 1.22 seconds to deallocate network for instance. [ 1744.269901] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9ff30040-38b1-4875-8466-91af1a450f26 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1744.270325] env[62508]: DEBUG nova.objects.instance [None req-9ff30040-38b1-4875-8466-91af1a450f26 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lazy-loading 'migration_context' on Instance uuid 95a289ac-3178-45ea-80d2-905b9af54f3c {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1744.284384] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776518, 'name': Rename_Task, 'duration_secs': 0.148008} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.284715] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1744.284970] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3fb13595-8cab-4f90-a581-8228a16147eb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.293553] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1744.293553] env[62508]: value = "task-1776520" [ 1744.293553] env[62508]: _type = "Task" [ 1744.293553] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.301726] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776520, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.352067] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: db74146d-abc3-4d48-be1b-6ad471794dbf] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1744.387849] env[62508]: DEBUG oslo_vmware.api [None req-053b352b-7012-48ca-b5f3-3fd28686f364 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776519, 'name': ReconfigVM_Task, 'duration_secs': 0.218192} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.389250] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-053b352b-7012-48ca-b5f3-3fd28686f364 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368777', 'volume_id': '14a689d7-0dd2-4b7e-b497-37fc35863218', 'name': 'volume-14a689d7-0dd2-4b7e-b497-37fc35863218', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a', 'attached_at': '', 'detached_at': '', 'volume_id': '14a689d7-0dd2-4b7e-b497-37fc35863218', 'serial': '14a689d7-0dd2-4b7e-b497-37fc35863218'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1744.540988] env[62508]: INFO nova.compute.manager [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Took 0.55 seconds to detach 1 volumes for instance. [ 1744.619963] env[62508]: INFO nova.compute.manager [-] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Took 1.28 seconds to deallocate network for instance. 
[ 1744.694558] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.773096] env[62508]: DEBUG nova.objects.base [None req-9ff30040-38b1-4875-8466-91af1a450f26 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Object Instance<95a289ac-3178-45ea-80d2-905b9af54f3c> lazy-loaded attributes: info_cache,migration_context {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1744.774033] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3088d5de-ba4e-40e0-82cd-d3037b720ee9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.795389] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eac5a82f-d151-42b1-9909-28216705e14d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.808025] env[62508]: DEBUG oslo_vmware.api [None req-9ff30040-38b1-4875-8466-91af1a450f26 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1744.808025] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5297feb5-0d12-1534-02ab-24d354b9fbe6" [ 1744.808025] env[62508]: _type = "Task" [ 1744.808025] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.810411] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776520, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.822281] env[62508]: DEBUG oslo_vmware.api [None req-9ff30040-38b1-4875-8466-91af1a450f26 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5297feb5-0d12-1534-02ab-24d354b9fbe6, 'name': SearchDatastore_Task, 'duration_secs': 0.008763} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.822852] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9ff30040-38b1-4875-8466-91af1a450f26 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.829115] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffdd5938-70ff-4565-bf53-941028ff24ba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.835447] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76fbb0a7-f128-4101-a941-06f4e03e764b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.866618] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: fac2fc84-ccaa-44b2-b8f3-7f9b6173c8c8] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1744.869459] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04edc810-f087-4595-8fcb-fbbf9bec386f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.877709] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a839118-be26-4b5b-93d2-502ae7b135c8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.896829] env[62508]: DEBUG nova.compute.provider_tree [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1745.047969] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.093321] env[62508]: DEBUG nova.objects.instance [None req-053b352b-7012-48ca-b5f3-3fd28686f364 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lazy-loading 'flavor' on Instance uuid aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1745.131448] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 
tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.228751] env[62508]: DEBUG nova.compute.manager [req-b775505d-2337-46d8-84be-8700da1b3742 req-8a29df9f-9858-4edb-ab83-f465fee88298 service nova] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Received event network-vif-deleted-4f1783a7-f095-41fa-9b4f-cf3505149a6e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1745.307465] env[62508]: DEBUG oslo_vmware.api [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776520, 'name': PowerOnVM_Task, 'duration_secs': 0.862869} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.307691] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1745.308123] env[62508]: INFO nova.compute.manager [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Took 8.70 seconds to spawn the instance on the hypervisor. [ 1745.308123] env[62508]: DEBUG nova.compute.manager [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1745.308886] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86bff5d9-2e46-4a7e-ab67-8ebb3329d07b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.373142] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 84098aee-4cfa-4bb4-a0e7-390a9f7ad6ad] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1745.433245] env[62508]: DEBUG nova.scheduler.client.report [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 145 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1745.433513] env[62508]: DEBUG nova.compute.provider_tree [None req-91928295-e9a9-4a51-a308-e897958e9900 
tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 145 to 146 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1745.433712] env[62508]: DEBUG nova.compute.provider_tree [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1745.827896] env[62508]: INFO nova.compute.manager [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Took 18.12 seconds to build instance. [ 1745.876016] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 18ee140a-97bd-439a-8027-0dd0a1f0a6e2] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1745.939021] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.372s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1745.939784] env[62508]: DEBUG nova.compute.manager [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1745.942757] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.854s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1745.946115] env[62508]: INFO nova.compute.claims [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1746.100722] env[62508]: DEBUG oslo_concurrency.lockutils [None req-053b352b-7012-48ca-b5f3-3fd28686f364 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.404s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1746.331206] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9441fd07-f27c-4f2c-bfda-914ad7e7b5b2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.636s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1746.379163] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: ffe54977-81c4-4842-9773-eed704a53ada] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1746.449589] env[62508]: DEBUG nova.compute.utils [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1746.452887] env[62508]: DEBUG nova.compute.manager [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1746.453079] env[62508]: DEBUG nova.network.neutron [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1746.560148] env[62508]: DEBUG nova.policy [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '179dc1563a864ad69f687e12508549f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bdc04dc308e44668828caa29bfdbe122', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1746.882252] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: a239d78f-085a-4e5c-924d-cf338298fa73] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1746.953464] env[62508]: DEBUG nova.compute.manager [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1747.067709] env[62508]: DEBUG oslo_concurrency.lockutils [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.067975] env[62508]: DEBUG oslo_concurrency.lockutils [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.068208] env[62508]: DEBUG oslo_concurrency.lockutils [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.068395] env[62508]: DEBUG oslo_concurrency.lockutils [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.068611] env[62508]: DEBUG oslo_concurrency.lockutils [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.074031] env[62508]: INFO nova.compute.manager [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Terminating instance [ 1747.079782] env[62508]: DEBUG nova.compute.manager [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1747.079782] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1747.080333] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95429a61-1f53-4b07-bd8e-db080e2ffde0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.088906] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1747.091765] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-726933fa-ab2e-4142-bf9a-b16e00b9ad4b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.102020] env[62508]: DEBUG oslo_vmware.api [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1747.102020] env[62508]: value = "task-1776522" [ 1747.102020] env[62508]: _type = "Task" [ 1747.102020] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.109263] env[62508]: DEBUG oslo_vmware.api [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776522, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.231437] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd17d22-abe2-43aa-8edc-51332514f58f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.239633] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab54c5ec-65ff-4926-8463-d5782157e508 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.274385] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c00dc04-bcff-47ef-8716-03b8fe495a20 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.282403] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98bed5fd-aa7b-4284-8ec2-fece404e172a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.300410] env[62508]: DEBUG nova.compute.provider_tree [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1747.352755] env[62508]: DEBUG nova.network.neutron [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Successfully created port: f79ec10a-7a06-4ee2-8de0-4db1e03d23d1 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1747.386234] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: ce74cbd8-b709-418b-a206-f51975fd0af1] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1747.506958] env[62508]: DEBUG nova.compute.manager [req-7b208ce5-ee60-46d7-891a-33385476377e req-ab7ab6d0-83ed-4f9d-880d-b06eb852ea2a service nova] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Received event network-changed-87ce4777-2520-4432-a1ed-03e189684761 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1747.506958] env[62508]: DEBUG nova.compute.manager [req-7b208ce5-ee60-46d7-891a-33385476377e req-ab7ab6d0-83ed-4f9d-880d-b06eb852ea2a service nova] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Refreshing instance network info cache due to event network-changed-87ce4777-2520-4432-a1ed-03e189684761. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1747.507976] env[62508]: DEBUG oslo_concurrency.lockutils [req-7b208ce5-ee60-46d7-891a-33385476377e req-ab7ab6d0-83ed-4f9d-880d-b06eb852ea2a service nova] Acquiring lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1747.507976] env[62508]: DEBUG oslo_concurrency.lockutils [req-7b208ce5-ee60-46d7-891a-33385476377e req-ab7ab6d0-83ed-4f9d-880d-b06eb852ea2a service nova] Acquired lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1747.507976] env[62508]: DEBUG nova.network.neutron [req-7b208ce5-ee60-46d7-891a-33385476377e req-ab7ab6d0-83ed-4f9d-880d-b06eb852ea2a service nova] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Refreshing network info cache for port 87ce4777-2520-4432-a1ed-03e189684761 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1747.609863] env[62508]: DEBUG oslo_vmware.api [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776522, 'name': PowerOffVM_Task, 'duration_secs': 0.21596} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.610414] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1747.610705] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1747.611062] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a02b360-768a-42db-80d6-f7943ddddbe2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.804088] env[62508]: DEBUG nova.scheduler.client.report [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1747.845495] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] 
Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1747.845495] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1747.845495] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Deleting the datastore file [datastore1] aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1747.845495] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd049dd2-beab-40c3-9abb-9a23707c96ae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.855993] env[62508]: DEBUG oslo_vmware.api [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1747.855993] env[62508]: value = "task-1776524" [ 1747.855993] env[62508]: _type = "Task" [ 1747.855993] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.866148] env[62508]: DEBUG oslo_vmware.api [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776524, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.889050] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: aae3b4a3-c954-4f73-bd12-9b19a675179c] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1747.968362] env[62508]: DEBUG nova.compute.manager [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1748.004590] env[62508]: DEBUG nova.virt.hardware [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1748.004740] env[62508]: DEBUG nova.virt.hardware [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1748.007012] env[62508]: DEBUG nova.virt.hardware [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1748.007012] env[62508]: DEBUG nova.virt.hardware [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1748.007012] env[62508]: DEBUG nova.virt.hardware [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1748.007012] env[62508]: DEBUG nova.virt.hardware [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1748.007012] env[62508]: DEBUG nova.virt.hardware [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1748.007012] env[62508]: DEBUG nova.virt.hardware [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1748.007012] env[62508]: DEBUG 
nova.virt.hardware [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1748.007012] env[62508]: DEBUG nova.virt.hardware [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1748.007012] env[62508]: DEBUG nova.virt.hardware [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1748.007403] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f168368f-640e-45b6-833a-3adeac915c2c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.018609] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d8e2645-4843-4063-83c5-de01bbcf5dad {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.260806] env[62508]: DEBUG nova.network.neutron [req-7b208ce5-ee60-46d7-891a-33385476377e req-ab7ab6d0-83ed-4f9d-880d-b06eb852ea2a service nova] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updated VIF entry in instance network info cache for port 87ce4777-2520-4432-a1ed-03e189684761. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1748.261199] env[62508]: DEBUG nova.network.neutron [req-7b208ce5-ee60-46d7-891a-33385476377e req-ab7ab6d0-83ed-4f9d-880d-b06eb852ea2a service nova] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updating instance_info_cache with network_info: [{"id": "87ce4777-2520-4432-a1ed-03e189684761", "address": "fa:16:3e:8b:d2:e9", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87ce4777-25", "ovs_interfaceid": "87ce4777-2520-4432-a1ed-03e189684761", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1748.263096] env[62508]: DEBUG oslo_concurrency.lockutils [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Acquiring lock "806102ec-7622-4770-91c9-8c5723893dec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.263227] env[62508]: DEBUG oslo_concurrency.lockutils [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Lock "806102ec-7622-4770-91c9-8c5723893dec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1748.263430] env[62508]: DEBUG oslo_concurrency.lockutils [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Acquiring lock "806102ec-7622-4770-91c9-8c5723893dec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.263611] env[62508]: DEBUG oslo_concurrency.lockutils [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Lock "806102ec-7622-4770-91c9-8c5723893dec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1748.263782] env[62508]: DEBUG oslo_concurrency.lockutils [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Lock "806102ec-7622-4770-91c9-8c5723893dec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1748.265776] env[62508]: INFO nova.compute.manager [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Terminating instance [ 1748.267607] env[62508]: DEBUG nova.compute.manager [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1748.267827] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1748.269136] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6459e520-6872-4b2c-a330-a9aec34da4c4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.277769] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1748.278237] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17e1a71b-cf0c-42f7-81c3-661eeae39b9f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.284904] env[62508]: DEBUG oslo_vmware.api [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Waiting for the task: (returnval){ [ 1748.284904] env[62508]: value = "task-1776525" [ 1748.284904] env[62508]: _type = "Task" [ 1748.284904] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.293502] env[62508]: DEBUG oslo_vmware.api [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': task-1776525, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.312236] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.368s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1748.313183] env[62508]: DEBUG nova.compute.manager [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1748.318039] env[62508]: DEBUG oslo_concurrency.lockutils [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.902s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1748.319847] env[62508]: INFO nova.compute.claims [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1748.370482] env[62508]: DEBUG oslo_vmware.api [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776524, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172857} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.370482] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1748.370666] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1748.370815] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1748.371052] env[62508]: INFO nova.compute.manager [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Took 1.29 seconds to destroy the instance on the hypervisor. 
[ 1748.371303] env[62508]: DEBUG oslo.service.loopingcall [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1748.371727] env[62508]: DEBUG nova.compute.manager [-] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1748.371817] env[62508]: DEBUG nova.network.neutron [-] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1748.395295] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 2f7b7109-0ced-4ea4-8dde-608655f2b3ab] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1748.592523] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b6bafc08-ffd2-444e-8760-5615bcc34fbb tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "24091abb-f71f-4528-8fc5-b97725cf079e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.592523] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b6bafc08-ffd2-444e-8760-5615bcc34fbb tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "24091abb-f71f-4528-8fc5-b97725cf079e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1748.766450] env[62508]: DEBUG oslo_concurrency.lockutils [req-7b208ce5-ee60-46d7-891a-33385476377e req-ab7ab6d0-83ed-4f9d-880d-b06eb852ea2a service nova] Releasing lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1748.796205] env[62508]: DEBUG oslo_vmware.api [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': task-1776525, 'name': PowerOffVM_Task, 'duration_secs': 0.207609} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.796205] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1748.796501] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1748.796862] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9bb9198-eb2d-447d-9308-5b636e267a19 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.821277] env[62508]: DEBUG nova.compute.utils [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1748.821277] env[62508]: DEBUG nova.compute.manager [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1748.821277] env[62508]: DEBUG nova.network.neutron [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1748.895172] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1748.895394] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1748.895582] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Deleting the datastore file [datastore1] 806102ec-7622-4770-91c9-8c5723893dec {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1748.895914] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7c66d4d-f178-4e1c-9e19-f3cea52c428f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.901982] 
env[62508]: DEBUG nova.policy [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '179dc1563a864ad69f687e12508549f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bdc04dc308e44668828caa29bfdbe122', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1748.903664] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: b297d642-88a7-4acc-a94d-e1cb7df81982] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1748.907836] env[62508]: DEBUG oslo_vmware.api [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Waiting for the task: (returnval){ [ 1748.907836] env[62508]: value = "task-1776527" [ 1748.907836] env[62508]: _type = "Task" [ 1748.907836] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.917081] env[62508]: DEBUG oslo_vmware.api [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': task-1776527, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.927623] env[62508]: DEBUG nova.compute.manager [req-6fa0e7bf-1f4d-467a-bdb9-558f297d4ff6 req-10926a86-cb46-4871-aab2-3501670c805b service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Received event network-vif-deleted-458dc468-1ae9-4f09-b0e2-4c866362fb80 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1748.927945] env[62508]: INFO nova.compute.manager [req-6fa0e7bf-1f4d-467a-bdb9-558f297d4ff6 req-10926a86-cb46-4871-aab2-3501670c805b service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Neutron deleted interface 458dc468-1ae9-4f09-b0e2-4c866362fb80; detaching it from the instance and deleting it from the info cache [ 1748.928038] env[62508]: DEBUG nova.network.neutron [req-6fa0e7bf-1f4d-467a-bdb9-558f297d4ff6 req-10926a86-cb46-4871-aab2-3501670c805b service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1749.050143] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1749.094550] env[62508]: INFO nova.compute.manager [None req-b6bafc08-ffd2-444e-8760-5615bcc34fbb tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 
24091abb-f71f-4528-8fc5-b97725cf079e] Detaching volume 08bd2c21-2d3a-43bb-988d-08d5d2dfa691 [ 1749.145885] env[62508]: INFO nova.virt.block_device [None req-b6bafc08-ffd2-444e-8760-5615bcc34fbb tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Attempting to driver detach volume 08bd2c21-2d3a-43bb-988d-08d5d2dfa691 from mountpoint /dev/sdb [ 1749.146154] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6bafc08-ffd2-444e-8760-5615bcc34fbb tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Volume detach. Driver type: vmdk {{(pid=62508) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1749.146343] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6bafc08-ffd2-444e-8760-5615bcc34fbb tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368783', 'volume_id': '08bd2c21-2d3a-43bb-988d-08d5d2dfa691', 'name': 'volume-08bd2c21-2d3a-43bb-988d-08d5d2dfa691', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '24091abb-f71f-4528-8fc5-b97725cf079e', 'attached_at': '', 'detached_at': '', 'volume_id': '08bd2c21-2d3a-43bb-988d-08d5d2dfa691', 'serial': '08bd2c21-2d3a-43bb-988d-08d5d2dfa691'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1749.147271] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d79dea-750c-4073-b8ac-29f41b50f7d8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.173867] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-333eb798-8159-43c6-9e76-9cde69252e83 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.183397] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-141b5977-bfbd-4535-aac8-8587c7ea2961 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.205510] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-911bc3e2-9da5-4157-ae28-09c72b3a91af {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.208894] env[62508]: DEBUG nova.network.neutron [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Successfully created port: 65b1bfe7-bc3c-4538-9aab-563919b69ba3 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1749.211564] env[62508]: DEBUG nova.network.neutron [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Successfully updated port: f79ec10a-7a06-4ee2-8de0-4db1e03d23d1 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1749.230024] 
env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6bafc08-ffd2-444e-8760-5615bcc34fbb tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] The volume has not been displaced from its original location: [datastore1] volume-08bd2c21-2d3a-43bb-988d-08d5d2dfa691/volume-08bd2c21-2d3a-43bb-988d-08d5d2dfa691.vmdk. No consolidation needed. {{(pid=62508) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1749.236698] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6bafc08-ffd2-444e-8760-5615bcc34fbb tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Reconfiguring VM instance instance-00000050 to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1749.237593] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ad4a3ab-6dde-40fe-a78a-0015f343721b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.255863] env[62508]: DEBUG oslo_vmware.api [None req-b6bafc08-ffd2-444e-8760-5615bcc34fbb tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1749.255863] env[62508]: value = "task-1776528" [ 1749.255863] env[62508]: _type = "Task" [ 1749.255863] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1749.263974] env[62508]: DEBUG oslo_vmware.api [None req-b6bafc08-ffd2-444e-8760-5615bcc34fbb tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776528, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.327717] env[62508]: DEBUG nova.compute.manager [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1749.391498] env[62508]: DEBUG nova.network.neutron [-] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1749.409255] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: a226327d-11df-45e0-bef8-2337a0317c9e] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1749.421469] env[62508]: DEBUG oslo_vmware.api [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Task: {'id': task-1776527, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17722} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.421710] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1749.421890] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1749.423108] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1749.423423] env[62508]: INFO nova.compute.manager [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1749.423694] env[62508]: DEBUG oslo.service.loopingcall [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1749.424466] env[62508]: DEBUG nova.compute.manager [-] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1749.424466] env[62508]: DEBUG nova.network.neutron [-] [instance: 806102ec-7622-4770-91c9-8c5723893dec] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1749.430754] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca5885fa-5cd8-4619-8347-e52eaada9134 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.440521] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d03c3ca-689d-4274-9a77-9b046d021fa4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.477555] env[62508]: DEBUG nova.compute.manager [req-6fa0e7bf-1f4d-467a-bdb9-558f297d4ff6 req-10926a86-cb46-4871-aab2-3501670c805b service nova] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Detach interface failed, port_id=458dc468-1ae9-4f09-b0e2-4c866362fb80, reason: Instance aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a could not be found. 
{{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1749.535720] env[62508]: DEBUG nova.compute.manager [req-19f42962-5850-4a8a-a63a-d74ff13f6380 req-3d853f88-4481-4389-9665-56cad8a702d0 service nova] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Received event network-vif-plugged-f79ec10a-7a06-4ee2-8de0-4db1e03d23d1 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1749.535996] env[62508]: DEBUG oslo_concurrency.lockutils [req-19f42962-5850-4a8a-a63a-d74ff13f6380 req-3d853f88-4481-4389-9665-56cad8a702d0 service nova] Acquiring lock "b83dd148-8cf6-474b-bb19-e0822732b12a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1749.536146] env[62508]: DEBUG oslo_concurrency.lockutils [req-19f42962-5850-4a8a-a63a-d74ff13f6380 req-3d853f88-4481-4389-9665-56cad8a702d0 service nova] Lock "b83dd148-8cf6-474b-bb19-e0822732b12a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.536306] env[62508]: DEBUG oslo_concurrency.lockutils [req-19f42962-5850-4a8a-a63a-d74ff13f6380 req-3d853f88-4481-4389-9665-56cad8a702d0 service nova] Lock "b83dd148-8cf6-474b-bb19-e0822732b12a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.536466] env[62508]: DEBUG nova.compute.manager [req-19f42962-5850-4a8a-a63a-d74ff13f6380 req-3d853f88-4481-4389-9665-56cad8a702d0 service nova] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] No waiting events found dispatching network-vif-plugged-f79ec10a-7a06-4ee2-8de0-4db1e03d23d1 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1749.536619] env[62508]: WARNING nova.compute.manager [req-19f42962-5850-4a8a-a63a-d74ff13f6380 req-3d853f88-4481-4389-9665-56cad8a702d0 service nova] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Received unexpected event network-vif-plugged-f79ec10a-7a06-4ee2-8de0-4db1e03d23d1 for instance with vm_state building and task_state spawning. [ 1749.536769] env[62508]: DEBUG nova.compute.manager [req-19f42962-5850-4a8a-a63a-d74ff13f6380 req-3d853f88-4481-4389-9665-56cad8a702d0 service nova] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Received event network-changed-f79ec10a-7a06-4ee2-8de0-4db1e03d23d1 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1749.536913] env[62508]: DEBUG nova.compute.manager [req-19f42962-5850-4a8a-a63a-d74ff13f6380 req-3d853f88-4481-4389-9665-56cad8a702d0 service nova] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Refreshing instance network info cache due to event network-changed-f79ec10a-7a06-4ee2-8de0-4db1e03d23d1. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1749.538198] env[62508]: DEBUG oslo_concurrency.lockutils [req-19f42962-5850-4a8a-a63a-d74ff13f6380 req-3d853f88-4481-4389-9665-56cad8a702d0 service nova] Acquiring lock "refresh_cache-b83dd148-8cf6-474b-bb19-e0822732b12a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1749.538389] env[62508]: DEBUG oslo_concurrency.lockutils [req-19f42962-5850-4a8a-a63a-d74ff13f6380 req-3d853f88-4481-4389-9665-56cad8a702d0 service nova] Acquired lock "refresh_cache-b83dd148-8cf6-474b-bb19-e0822732b12a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1749.538571] env[62508]: DEBUG nova.network.neutron [req-19f42962-5850-4a8a-a63a-d74ff13f6380 req-3d853f88-4481-4389-9665-56cad8a702d0 service nova] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Refreshing network info cache for port f79ec10a-7a06-4ee2-8de0-4db1e03d23d1 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1749.657460] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4e64e1-1d7c-48b7-875c-93d0279bac6e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.665615] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c27ff184-3262-4b23-96ce-46ef7e1000ae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.696357] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9110c17-b6ad-4eda-a748-53b7591219bd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.703730] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3329a1-4484-415c-a9f0-19a2f5173ccd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.716903] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "refresh_cache-b83dd148-8cf6-474b-bb19-e0822732b12a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1749.717348] env[62508]: DEBUG nova.compute.provider_tree [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1749.766699] env[62508]: DEBUG oslo_vmware.api [None req-b6bafc08-ffd2-444e-8760-5615bcc34fbb tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776528, 'name': ReconfigVM_Task, 'duration_secs': 0.270248} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.766969] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6bafc08-ffd2-444e-8760-5615bcc34fbb tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Reconfigured VM instance instance-00000050 to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1749.771852] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad6d7bf9-a97b-45d5-9ce5-ce7c3aa96fc0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.786467] env[62508]: DEBUG oslo_vmware.api [None req-b6bafc08-ffd2-444e-8760-5615bcc34fbb tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1749.786467] env[62508]: value = "task-1776529" [ 1749.786467] env[62508]: _type = "Task" [ 1749.786467] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1749.794090] env[62508]: DEBUG oslo_vmware.api [None req-b6bafc08-ffd2-444e-8760-5615bcc34fbb tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776529, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.893855] env[62508]: INFO nova.compute.manager [-] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Took 1.52 seconds to deallocate network for instance. [ 1749.912179] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 73452964-d690-451d-98c3-fba3c3301c6d] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1750.075891] env[62508]: DEBUG nova.network.neutron [req-19f42962-5850-4a8a-a63a-d74ff13f6380 req-3d853f88-4481-4389-9665-56cad8a702d0 service nova] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1750.173652] env[62508]: DEBUG nova.network.neutron [req-19f42962-5850-4a8a-a63a-d74ff13f6380 req-3d853f88-4481-4389-9665-56cad8a702d0 service nova] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1750.220792] env[62508]: DEBUG nova.scheduler.client.report [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1750.302487] env[62508]: DEBUG oslo_vmware.api [None req-b6bafc08-ffd2-444e-8760-5615bcc34fbb tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776529, 'name': ReconfigVM_Task, 'duration_secs': 0.209592} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.302968] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6bafc08-ffd2-444e-8760-5615bcc34fbb tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368783', 'volume_id': '08bd2c21-2d3a-43bb-988d-08d5d2dfa691', 'name': 'volume-08bd2c21-2d3a-43bb-988d-08d5d2dfa691', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '24091abb-f71f-4528-8fc5-b97725cf079e', 'attached_at': '', 'detached_at': '', 'volume_id': '08bd2c21-2d3a-43bb-988d-08d5d2dfa691', 'serial': '08bd2c21-2d3a-43bb-988d-08d5d2dfa691'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1750.336286] env[62508]: DEBUG nova.compute.manager [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1750.365688] env[62508]: DEBUG nova.virt.hardware [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1750.366444] env[62508]: DEBUG nova.virt.hardware [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1750.366703] env[62508]: DEBUG nova.virt.hardware [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1750.366920] env[62508]: DEBUG nova.virt.hardware [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1750.367118] env[62508]: DEBUG nova.virt.hardware [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1750.367302] env[62508]: DEBUG nova.virt.hardware [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1750.367456] env[62508]: DEBUG nova.virt.hardware [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1750.367606] env[62508]: DEBUG nova.virt.hardware [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1750.367764] env[62508]: DEBUG 
nova.virt.hardware [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1750.367947] env[62508]: DEBUG nova.virt.hardware [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1750.368143] env[62508]: DEBUG nova.virt.hardware [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1750.369136] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e58669-2a3f-4175-bb6f-b0c0a0b7baa3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.379562] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-511ff5d8-aab8-4cd1-884a-12d1a9e39120 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.400446] env[62508]: DEBUG oslo_concurrency.lockutils [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1750.416134] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: e652e59f-9432-41cf-b4a5-0f5cf649b24e] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1750.676771] env[62508]: DEBUG oslo_concurrency.lockutils [req-19f42962-5850-4a8a-a63a-d74ff13f6380 req-3d853f88-4481-4389-9665-56cad8a702d0 service nova] Releasing lock "refresh_cache-b83dd148-8cf6-474b-bb19-e0822732b12a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1750.677181] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquired lock "refresh_cache-b83dd148-8cf6-474b-bb19-e0822732b12a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1750.677343] env[62508]: DEBUG nova.network.neutron [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1750.725931] env[62508]: DEBUG oslo_concurrency.lockutils [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.408s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.726942] env[62508]: DEBUG nova.compute.manager [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1750.731992] env[62508]: DEBUG oslo_concurrency.lockutils [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.117s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1750.735481] env[62508]: INFO nova.compute.claims [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1750.852993] env[62508]: DEBUG nova.objects.instance [None req-b6bafc08-ffd2-444e-8760-5615bcc34fbb tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lazy-loading 'flavor' on Instance uuid 24091abb-f71f-4528-8fc5-b97725cf079e {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1750.910486] env[62508]: DEBUG nova.network.neutron [-] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1750.919488] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 42eb98a9-e341-4a17-9d76-2a2c37efc1a1] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1751.215522] env[62508]: DEBUG nova.network.neutron [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1751.239277] env[62508]: DEBUG nova.compute.utils [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1751.248065] env[62508]: DEBUG nova.compute.manager [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1751.248165] env[62508]: DEBUG nova.network.neutron [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1751.302454] env[62508]: DEBUG nova.policy [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '179dc1563a864ad69f687e12508549f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bdc04dc308e44668828caa29bfdbe122', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1751.410066] env[62508]: DEBUG nova.network.neutron [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Updating instance_info_cache with network_info: [{"id": "f79ec10a-7a06-4ee2-8de0-4db1e03d23d1", "address": "fa:16:3e:6b:09:6d", "network": {"id": "9442efbf-54d6-4a21-81ab-3e50e7f19b4c", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2050937254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdc04dc308e44668828caa29bfdbe122", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf79ec10a-7a", "ovs_interfaceid": "f79ec10a-7a06-4ee2-8de0-4db1e03d23d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1751.414020] env[62508]: INFO nova.compute.manager [-] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Took 1.99 seconds to deallocate network for instance. 
[ 1751.424300] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: b911f25d-711b-411e-bb2d-2e59386ff2ea] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1751.700327] env[62508]: DEBUG nova.network.neutron [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Successfully created port: 6fe0e3e8-4640-43e5-992e-718372bd92d1 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1751.748631] env[62508]: DEBUG nova.compute.manager [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1751.862512] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b6bafc08-ffd2-444e-8760-5615bcc34fbb tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "24091abb-f71f-4528-8fc5-b97725cf079e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.271s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.888141] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dea73dfb-5e8a-40d2-b52a-eef6caf7ed17 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "24091abb-f71f-4528-8fc5-b97725cf079e" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1751.888502] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dea73dfb-5e8a-40d2-b52a-eef6caf7ed17 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "24091abb-f71f-4528-8fc5-b97725cf079e" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.888752] env[62508]: DEBUG nova.compute.manager [None req-dea73dfb-5e8a-40d2-b52a-eef6caf7ed17 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1751.889947] env[62508]: DEBUG nova.compute.manager [req-0f0bd636-453f-4809-a887-f35afb98a9d9 req-7cd7d688-06d5-49ee-a402-4c9846690d56 service nova] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Received event network-vif-deleted-5c6d20a1-1c14-4874-b295-9828a9172d8d {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1751.893760] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9facca-886c-45fe-a133-760b645471b2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.901616] env[62508]: DEBUG nova.compute.manager [None req-dea73dfb-5e8a-40d2-b52a-eef6caf7ed17 tempest-AttachVolumeTestJSON-1329440440 
tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62508) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1751.902286] env[62508]: DEBUG nova.objects.instance [None req-dea73dfb-5e8a-40d2-b52a-eef6caf7ed17 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lazy-loading 'flavor' on Instance uuid 24091abb-f71f-4528-8fc5-b97725cf079e {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1751.914240] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Releasing lock "refresh_cache-b83dd148-8cf6-474b-bb19-e0822732b12a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1751.914884] env[62508]: DEBUG nova.compute.manager [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Instance network_info: |[{"id": "f79ec10a-7a06-4ee2-8de0-4db1e03d23d1", "address": "fa:16:3e:6b:09:6d", "network": {"id": "9442efbf-54d6-4a21-81ab-3e50e7f19b4c", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2050937254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdc04dc308e44668828caa29bfdbe122", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf79ec10a-7a", "ovs_interfaceid": "f79ec10a-7a06-4ee2-8de0-4db1e03d23d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1751.915370] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:09:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '23fc30ea-1f06-424d-86e1-27ae5435b1a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f79ec10a-7a06-4ee2-8de0-4db1e03d23d1', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1751.925236] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Creating folder: Project (bdc04dc308e44668828caa29bfdbe122). Parent ref: group-v368536. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1751.932470] env[62508]: DEBUG oslo_concurrency.lockutils [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1751.932751] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2c7f7590-93e8-4fc5-a81d-3811eb1932f8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.935241] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 7d23d8f0-d7a9-4236-ad28-208e77b72138] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1751.947986] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Created folder: Project (bdc04dc308e44668828caa29bfdbe122) in parent group-v368536. [ 1751.948208] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Creating folder: Instances. Parent ref: group-v368797. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1751.948464] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c829779-be34-4115-b037-5511df10989b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.960628] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Created folder: Instances in parent group-v368797. [ 1751.961058] env[62508]: DEBUG oslo.service.loopingcall [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1751.961188] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1751.961634] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-67cf6541-afcc-4f14-9fd6-8656d2b30d91 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.988717] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1751.988717] env[62508]: value = "task-1776532" [ 1751.988717] env[62508]: _type = "Task" [ 1751.988717] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.997476] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776532, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.084501] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d8e7cd-ac57-492e-973d-ef9b708b6813 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.092236] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9c7c25-6dc2-4d20-9fb0-12c0d43ebb7f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.126849] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd90fa5-9c28-4905-88eb-cfbe3586c4a6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.138649] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c166ef-6631-4c1f-b323-c1116ff5d682 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.153481] env[62508]: DEBUG nova.compute.provider_tree [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1752.407513] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-dea73dfb-5e8a-40d2-b52a-eef6caf7ed17 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1752.407806] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3658d67c-bd48-493a-869d-bea896784556 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.415070] env[62508]: DEBUG oslo_vmware.api [None req-dea73dfb-5e8a-40d2-b52a-eef6caf7ed17 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1752.415070] env[62508]: value = "task-1776533" [ 1752.415070] env[62508]: _type = "Task" [ 1752.415070] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.424110] env[62508]: DEBUG oslo_vmware.api [None req-dea73dfb-5e8a-40d2-b52a-eef6caf7ed17 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776533, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.438233] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 03552483-a365-4d25-94bc-ea9b38ee6cd6] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1752.498656] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776532, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.657291] env[62508]: DEBUG nova.scheduler.client.report [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1752.767403] env[62508]: DEBUG nova.compute.manager [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1752.796555] env[62508]: DEBUG nova.virt.hardware [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1752.796831] env[62508]: DEBUG nova.virt.hardware [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1752.796994] env[62508]: DEBUG nova.virt.hardware [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1752.797197] env[62508]: DEBUG nova.virt.hardware [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1752.797337] env[62508]: DEBUG nova.virt.hardware [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1752.797483] env[62508]: DEBUG nova.virt.hardware [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1752.797687] env[62508]: DEBUG nova.virt.hardware [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1752.797846] env[62508]: DEBUG nova.virt.hardware [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1752.798014] env[62508]: DEBUG nova.virt.hardware [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1752.798185] env[62508]: DEBUG nova.virt.hardware [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1752.798362] env[62508]: DEBUG nova.virt.hardware [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1752.799287] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87308581-8b67-494d-a6d2-9ccd52e2fe17 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.807235] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8747fad6-14e6-4a36-a177-4247e43b218c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.924620] env[62508]: DEBUG oslo_vmware.api [None req-dea73dfb-5e8a-40d2-b52a-eef6caf7ed17 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776533, 'name': PowerOffVM_Task, 'duration_secs': 0.195993} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.924899] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-dea73dfb-5e8a-40d2-b52a-eef6caf7ed17 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1752.925088] env[62508]: DEBUG nova.compute.manager [None req-dea73dfb-5e8a-40d2-b52a-eef6caf7ed17 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1752.925877] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab5de6db-b0ea-4edc-84f8-cd59b0054653 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.941207] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 192995e7-82f5-41be-990d-d91b93f981e1] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1753.000624] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776532, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.157184] env[62508]: DEBUG oslo_concurrency.lockutils [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Acquiring lock "e7f521db-2dab-4c2c-bf2b-aa6e217f29bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1753.157406] env[62508]: DEBUG oslo_concurrency.lockutils [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Lock "e7f521db-2dab-4c2c-bf2b-aa6e217f29bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.163655] env[62508]: DEBUG oslo_concurrency.lockutils [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.433s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.164295] env[62508]: DEBUG nova.compute.manager [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1753.166781] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.992s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.166864] env[62508]: DEBUG nova.objects.instance [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lazy-loading 'resources' on Instance uuid a058273e-9c68-4d73-9149-ceb60c1c1cda {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1753.440771] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dea73dfb-5e8a-40d2-b52a-eef6caf7ed17 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "24091abb-f71f-4528-8fc5-b97725cf079e" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.552s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.444504] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: e2d4c71b-1164-4c7d-9ffb-7f5489f92d32] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1753.500414] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776532, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.659589] env[62508]: DEBUG nova.compute.manager [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1753.670213] env[62508]: DEBUG nova.compute.utils [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1753.671508] env[62508]: DEBUG nova.compute.manager [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1753.671741] env[62508]: DEBUG nova.network.neutron [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1753.730521] env[62508]: DEBUG nova.policy [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '285fedd2e7fd4d259ca7fc57c3fcbf46', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74c45615efbb425fbec8400f6d225892', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1753.947658] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 868cf942-f348-488d-b00a-af4c8b5efda5] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1754.002680] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776532, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.060024] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4175d426-527d-404d-a232-cc3d0ad17668 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.066668] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cbbff84-452d-4ad7-9dbf-b86810003d08 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.102897] env[62508]: DEBUG nova.network.neutron [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Successfully created port: c262a662-8a1b-45b0-b251-df840455ae82 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1754.105326] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e03021-19bf-4c37-b666-e09ea07dfa6c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.113447] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2863383a-7414-4f54-ab5f-717c20c27922 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.127912] env[62508]: DEBUG nova.compute.provider_tree [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1754.180455] env[62508]: DEBUG nova.compute.manager [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1754.182320] env[62508]: DEBUG oslo_concurrency.lockutils [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.458830] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: ee99ff4d-9996-4cfa-b038-7b19aef27438] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1754.484821] env[62508]: DEBUG nova.objects.instance [None req-69d6f7bd-5f70-49d1-a4a6-83a6f470ae4b tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lazy-loading 'flavor' on Instance uuid 24091abb-f71f-4528-8fc5-b97725cf079e {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1754.502608] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776532, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.631529] env[62508]: DEBUG nova.scheduler.client.report [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1754.963049] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1754.963206] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Cleaning up deleted instances with incomplete migration {{(pid=62508) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1754.988290] env[62508]: DEBUG oslo_concurrency.lockutils [None req-69d6f7bd-5f70-49d1-a4a6-83a6f470ae4b tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "refresh_cache-24091abb-f71f-4528-8fc5-b97725cf079e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.988472] env[62508]: DEBUG oslo_concurrency.lockutils [None req-69d6f7bd-5f70-49d1-a4a6-83a6f470ae4b 
tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquired lock "refresh_cache-24091abb-f71f-4528-8fc5-b97725cf079e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.988732] env[62508]: DEBUG nova.network.neutron [None req-69d6f7bd-5f70-49d1-a4a6-83a6f470ae4b tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1754.988910] env[62508]: DEBUG nova.objects.instance [None req-69d6f7bd-5f70-49d1-a4a6-83a6f470ae4b tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lazy-loading 'info_cache' on Instance uuid 24091abb-f71f-4528-8fc5-b97725cf079e {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1755.002460] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776532, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.136309] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.970s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1755.138601] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.444s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1755.138895] env[62508]: DEBUG nova.objects.instance [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lazy-loading 'resources' on Instance uuid 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1755.171861] env[62508]: INFO nova.scheduler.client.report [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Deleted allocations for instance a058273e-9c68-4d73-9149-ceb60c1c1cda [ 1755.189646] env[62508]: DEBUG nova.compute.manager [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1755.221848] env[62508]: DEBUG nova.virt.hardware [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1755.222266] env[62508]: DEBUG nova.virt.hardware [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1755.222520] env[62508]: DEBUG nova.virt.hardware [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1755.222785] env[62508]: DEBUG nova.virt.hardware [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1755.222959] env[62508]: DEBUG nova.virt.hardware [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1755.223142] env[62508]: DEBUG nova.virt.hardware [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1755.223364] env[62508]: DEBUG nova.virt.hardware [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1755.223540] env[62508]: DEBUG nova.virt.hardware [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1755.223715] env[62508]: DEBUG 
nova.virt.hardware [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1755.224083] env[62508]: DEBUG nova.virt.hardware [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1755.224083] env[62508]: DEBUG nova.virt.hardware [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1755.225031] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b868e399-6e83-438c-b08f-b527ca7e370f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.234331] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b724e38-fcf6-4274-8aee-1ee79b06485b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.492739] env[62508]: DEBUG nova.objects.base [None req-69d6f7bd-5f70-49d1-a4a6-83a6f470ae4b tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Object Instance<24091abb-f71f-4528-8fc5-b97725cf079e> lazy-loaded attributes: flavor,info_cache {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1755.504263] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776532, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.679691] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f984dc43-58e1-49cb-8431-c6157c7ae865 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "a058273e-9c68-4d73-9149-ceb60c1c1cda" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.938s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1755.835331] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f093335-2c48-487d-a5bb-13d5067e4f83 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.843452] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477bd0a4-b226-459d-9f97-21e8b5a1bdfe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.873056] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f6b732a-f9a1-4a1b-b444-45799d988146 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.880221] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f7ed4a-6122-4b6c-9677-f1d6f59afd3e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.895023] env[62508]: DEBUG nova.compute.provider_tree [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1755.962462] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1755.962705] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1756.008069] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776532, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.285409] env[62508]: DEBUG nova.network.neutron [None req-69d6f7bd-5f70-49d1-a4a6-83a6f470ae4b tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Updating instance_info_cache with network_info: [{"id": "0eda6157-2402-4297-8eb5-07a5b94eba56", "address": "fa:16:3e:b7:4c:0b", "network": {"id": "121c7907-9028-4be7-9d23-48e5c34ec429", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-954413717-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e682eb7bbd240afb2f6581c7478b99c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eda6157-24", "ovs_interfaceid": "0eda6157-2402-4297-8eb5-07a5b94eba56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1756.399964] env[62508]: DEBUG nova.scheduler.client.report [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1756.470286] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1756.470423] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1756.507752] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776532, 'name': CreateVM_Task, 'duration_secs': 4.173193} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.508021] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1756.509153] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1756.509323] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1756.509640] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1756.509917] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a4ff112-7ad5-41a6-b22c-eb822cadfde5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.514986] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1756.514986] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527dede3-d77d-4998-cdd5-5c44cfda5b93" [ 1756.514986] env[62508]: _type = "Task" [ 1756.514986] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.522835] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527dede3-d77d-4998-cdd5-5c44cfda5b93, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.789373] env[62508]: DEBUG oslo_concurrency.lockutils [None req-69d6f7bd-5f70-49d1-a4a6-83a6f470ae4b tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Releasing lock "refresh_cache-24091abb-f71f-4528-8fc5-b97725cf079e" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1756.798071] env[62508]: DEBUG nova.compute.manager [req-162bd617-1f95-4a31-b05b-757040db8302 req-619d673b-b10a-4a12-b071-a4a441580d6e service nova] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Received event network-vif-plugged-65b1bfe7-bc3c-4538-9aab-563919b69ba3 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1756.798371] env[62508]: DEBUG oslo_concurrency.lockutils [req-162bd617-1f95-4a31-b05b-757040db8302 req-619d673b-b10a-4a12-b071-a4a441580d6e service nova] Acquiring lock "5da47620-3979-44e8-91c5-154a1fe4ee48-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1756.798493] env[62508]: DEBUG oslo_concurrency.lockutils [req-162bd617-1f95-4a31-b05b-757040db8302 req-619d673b-b10a-4a12-b071-a4a441580d6e service nova] Lock "5da47620-3979-44e8-91c5-154a1fe4ee48-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1756.798657] env[62508]: DEBUG oslo_concurrency.lockutils [req-162bd617-1f95-4a31-b05b-757040db8302 req-619d673b-b10a-4a12-b071-a4a441580d6e service nova] Lock "5da47620-3979-44e8-91c5-154a1fe4ee48-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.798866] env[62508]: DEBUG nova.compute.manager [req-162bd617-1f95-4a31-b05b-757040db8302 req-619d673b-b10a-4a12-b071-a4a441580d6e service nova] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] No waiting events found dispatching network-vif-plugged-65b1bfe7-bc3c-4538-9aab-563919b69ba3 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1756.799126] env[62508]: WARNING nova.compute.manager [req-162bd617-1f95-4a31-b05b-757040db8302 req-619d673b-b10a-4a12-b071-a4a441580d6e service nova] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Received unexpected event network-vif-plugged-65b1bfe7-bc3c-4538-9aab-563919b69ba3 for instance with vm_state building and task_state spawning. 
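The recurring "Task: {...} progress is N%" and wait_for_task entries in this section come from oslo.vmware polling vCenter tasks (CreateVM_Task, SearchDatastore_Task, PowerOnVM_Task, ...) until they complete. A minimal illustrative sketch of that calling pattern follows; it assumes an already-established oslo_vmware.api.VMwareAPISession named `session` (created as logged at the start of this file) and a VirtualMachine managed-object reference `vm_ref` — both placeholders, not values taken from this log.

    # Illustrative sketch only, under the assumptions stated above.
    # Kick off an asynchronous vCenter operation; the return value is a Task
    # managed-object reference like the task-1776534 handles seen above.
    task_ref = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)

    # wait_for_task() polls the task state at the configured interval; each
    # poll produces the "Task: {'id': ..., 'name': PowerOnVM_Task} progress
    # is N%" DEBUG lines above, and it raises if the task ends in error.
    task_info = session.wait_for_task(task_ref)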
[ 1756.877897] env[62508]: DEBUG nova.network.neutron [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Successfully updated port: 65b1bfe7-bc3c-4538-9aab-563919b69ba3 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1756.906521] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.768s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.908855] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9ff30040-38b1-4875-8466-91af1a450f26 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 12.086s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1756.927546] env[62508]: INFO nova.scheduler.client.report [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Deleted allocations for instance 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7 [ 1757.009550] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1757.009655] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquired lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1757.009719] env[62508]: DEBUG nova.network.neutron [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Forcefully refreshing network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1757.025730] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527dede3-d77d-4998-cdd5-5c44cfda5b93, 'name': SearchDatastore_Task, 'duration_secs': 0.010913} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.026020] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1757.026301] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1757.026581] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1757.026809] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1757.027049] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1757.027343] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a36c5cb-19ca-483c-bb69-45c9586b41c6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.035777] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1757.035963] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1757.036675] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-452ca9e3-1ab5-41fd-a024-3b15c0f344f9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.042430] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1757.042430] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526c89e0-b6fa-7a5c-2a6b-e0113c369ed2" [ 1757.042430] env[62508]: _type = "Task" [ 1757.042430] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.052549] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526c89e0-b6fa-7a5c-2a6b-e0113c369ed2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.166763] env[62508]: DEBUG nova.compute.manager [req-7ceb2573-dfda-4cd9-a511-bb33e6277060 req-daffd016-3f2b-4eee-b7da-b0d98c8c1dd9 service nova] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Received event network-vif-plugged-c262a662-8a1b-45b0-b251-df840455ae82 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1757.166988] env[62508]: DEBUG oslo_concurrency.lockutils [req-7ceb2573-dfda-4cd9-a511-bb33e6277060 req-daffd016-3f2b-4eee-b7da-b0d98c8c1dd9 service nova] Acquiring lock "b74d8374-d5ae-456b-9e9e-ec09459a737b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1757.167239] env[62508]: DEBUG oslo_concurrency.lockutils [req-7ceb2573-dfda-4cd9-a511-bb33e6277060 req-daffd016-3f2b-4eee-b7da-b0d98c8c1dd9 service nova] Lock "b74d8374-d5ae-456b-9e9e-ec09459a737b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1757.167413] env[62508]: DEBUG oslo_concurrency.lockutils [req-7ceb2573-dfda-4cd9-a511-bb33e6277060 req-daffd016-3f2b-4eee-b7da-b0d98c8c1dd9 service nova] Lock "b74d8374-d5ae-456b-9e9e-ec09459a737b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1757.167583] env[62508]: DEBUG nova.compute.manager [req-7ceb2573-dfda-4cd9-a511-bb33e6277060 req-daffd016-3f2b-4eee-b7da-b0d98c8c1dd9 service nova] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] No waiting events found dispatching network-vif-plugged-c262a662-8a1b-45b0-b251-df840455ae82 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1757.167750] env[62508]: WARNING nova.compute.manager [req-7ceb2573-dfda-4cd9-a511-bb33e6277060 req-daffd016-3f2b-4eee-b7da-b0d98c8c1dd9 service nova] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Received unexpected event 
network-vif-plugged-c262a662-8a1b-45b0-b251-df840455ae82 for instance with vm_state building and task_state spawning. [ 1757.253405] env[62508]: DEBUG nova.network.neutron [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Successfully updated port: c262a662-8a1b-45b0-b251-df840455ae82 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1757.292172] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-69d6f7bd-5f70-49d1-a4a6-83a6f470ae4b tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1757.292476] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-24afc20f-bb9a-48d5-8558-93b1a92f74c5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.301321] env[62508]: DEBUG oslo_vmware.api [None req-69d6f7bd-5f70-49d1-a4a6-83a6f470ae4b tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1757.301321] env[62508]: value = "task-1776534" [ 1757.301321] env[62508]: _type = "Task" [ 1757.301321] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.311665] env[62508]: DEBUG oslo_vmware.api [None req-69d6f7bd-5f70-49d1-a4a6-83a6f470ae4b tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776534, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.380324] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "refresh_cache-5da47620-3979-44e8-91c5-154a1fe4ee48" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1757.380475] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquired lock "refresh_cache-5da47620-3979-44e8-91c5-154a1fe4ee48" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1757.380641] env[62508]: DEBUG nova.network.neutron [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1757.415152] env[62508]: DEBUG nova.network.neutron [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Successfully updated port: 6fe0e3e8-4640-43e5-992e-718372bd92d1 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1757.439029] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a093f431-cb09-4d57-b8ab-34f36efc566c tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "954e23bc-3355-4ab6-ad81-ea7bc55b6ee7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.645s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1757.558695] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526c89e0-b6fa-7a5c-2a6b-e0113c369ed2, 'name': SearchDatastore_Task, 'duration_secs': 0.011991} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.566417] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46c35a17-b898-46e3-8e7e-a10891bf848b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.573042] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1757.573042] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520f12b8-6495-b131-d110-b237b3c7d364" [ 1757.573042] env[62508]: _type = "Task" [ 1757.573042] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.582197] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520f12b8-6495-b131-d110-b237b3c7d364, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.676880] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a129402-bbae-49cf-8c2d-6670b5ea3530 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.685735] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e68f85-d636-4a97-9e0a-32b608f6c16f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.720060] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0824f8-a432-4024-b6f0-6b9145dd398a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.727959] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db238d2e-5cd6-4b1e-ba33-429b1aceb856 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.741724] env[62508]: DEBUG nova.compute.provider_tree [None req-9ff30040-38b1-4875-8466-91af1a450f26 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1757.756514] env[62508]: DEBUG oslo_concurrency.lockutils [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "refresh_cache-b74d8374-d5ae-456b-9e9e-ec09459a737b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1757.756604] env[62508]: DEBUG oslo_concurrency.lockutils [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired lock "refresh_cache-b74d8374-d5ae-456b-9e9e-ec09459a737b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1757.756742] env[62508]: DEBUG nova.network.neutron [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1757.811610] env[62508]: DEBUG oslo_vmware.api [None req-69d6f7bd-5f70-49d1-a4a6-83a6f470ae4b tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776534, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.910337] env[62508]: DEBUG nova.network.neutron [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1757.917210] env[62508]: DEBUG oslo_concurrency.lockutils [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "refresh_cache-84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1757.917354] env[62508]: DEBUG oslo_concurrency.lockutils [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquired lock "refresh_cache-84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1757.917528] env[62508]: DEBUG nova.network.neutron [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1758.041140] env[62508]: DEBUG nova.network.neutron [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Updating instance_info_cache with network_info: [{"id": "65b1bfe7-bc3c-4538-9aab-563919b69ba3", "address": "fa:16:3e:5b:96:d2", "network": {"id": "9442efbf-54d6-4a21-81ab-3e50e7f19b4c", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2050937254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdc04dc308e44668828caa29bfdbe122", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65b1bfe7-bc", "ovs_interfaceid": "65b1bfe7-bc3c-4538-9aab-563919b69ba3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1758.064648] env[62508]: DEBUG oslo_concurrency.lockutils [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.064875] env[62508]: DEBUG oslo_concurrency.lockutils [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1758.084225] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520f12b8-6495-b131-d110-b237b3c7d364, 'name': SearchDatastore_Task, 'duration_secs': 0.00944} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.084622] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1758.084726] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] b83dd148-8cf6-474b-bb19-e0822732b12a/b83dd148-8cf6-474b-bb19-e0822732b12a.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1758.084974] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b5d21dfc-37e9-465d-b796-f27751e24f63 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.091335] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1758.091335] env[62508]: value = "task-1776535" [ 1758.091335] env[62508]: _type = "Task" [ 1758.091335] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.101231] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776535, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.246832] env[62508]: DEBUG nova.scheduler.client.report [None req-9ff30040-38b1-4875-8466-91af1a450f26 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1758.268632] env[62508]: DEBUG nova.network.neutron [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating instance_info_cache with network_info: [{"id": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "address": "fa:16:3e:f8:bf:1b", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9e88907-91", "ovs_interfaceid": "d9e88907-9194-4d90-87ab-d1b87ef0e48e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1758.294672] env[62508]: DEBUG nova.network.neutron [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1758.312195] env[62508]: DEBUG oslo_vmware.api [None req-69d6f7bd-5f70-49d1-a4a6-83a6f470ae4b tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776534, 'name': PowerOnVM_Task, 'duration_secs': 0.569144} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.314698] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-69d6f7bd-5f70-49d1-a4a6-83a6f470ae4b tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1758.314908] env[62508]: DEBUG nova.compute.manager [None req-69d6f7bd-5f70-49d1-a4a6-83a6f470ae4b tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1758.316076] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3256ca50-06d1-489a-9006-cf64411e1f38 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.453353] env[62508]: DEBUG nova.network.neutron [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1758.456128] env[62508]: DEBUG nova.network.neutron [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Updating instance_info_cache with network_info: [{"id": "c262a662-8a1b-45b0-b251-df840455ae82", "address": "fa:16:3e:8c:13:f8", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc262a662-8a", "ovs_interfaceid": "c262a662-8a1b-45b0-b251-df840455ae82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1758.543626] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Releasing lock "refresh_cache-5da47620-3979-44e8-91c5-154a1fe4ee48" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1758.543957] env[62508]: DEBUG nova.compute.manager [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 
tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Instance network_info: |[{"id": "65b1bfe7-bc3c-4538-9aab-563919b69ba3", "address": "fa:16:3e:5b:96:d2", "network": {"id": "9442efbf-54d6-4a21-81ab-3e50e7f19b4c", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2050937254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdc04dc308e44668828caa29bfdbe122", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65b1bfe7-bc", "ovs_interfaceid": "65b1bfe7-bc3c-4538-9aab-563919b69ba3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1758.544457] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:96:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '23fc30ea-1f06-424d-86e1-27ae5435b1a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '65b1bfe7-bc3c-4538-9aab-563919b69ba3', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1758.551928] env[62508]: DEBUG oslo.service.loopingcall [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1758.552155] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1758.552379] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-29141637-c518-4428-bb63-32d65042b1ec {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.569425] env[62508]: DEBUG nova.compute.manager [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1758.580023] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1758.580023] env[62508]: value = "task-1776536" [ 1758.580023] env[62508]: _type = "Task" [ 1758.580023] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.589660] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776536, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.600954] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776535, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.427584} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.604085] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] b83dd148-8cf6-474b-bb19-e0822732b12a/b83dd148-8cf6-474b-bb19-e0822732b12a.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1758.604085] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1758.604085] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1894889-9ea5-4309-94ce-0e4f0b6bad4e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.609667] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1758.609667] env[62508]: value = "task-1776537" [ 1758.609667] env[62508]: _type = "Task" [ 1758.609667] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.618035] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776537, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.623329] env[62508]: DEBUG nova.network.neutron [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Updating instance_info_cache with network_info: [{"id": "6fe0e3e8-4640-43e5-992e-718372bd92d1", "address": "fa:16:3e:7b:6d:37", "network": {"id": "9442efbf-54d6-4a21-81ab-3e50e7f19b4c", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2050937254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdc04dc308e44668828caa29bfdbe122", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fe0e3e8-46", "ovs_interfaceid": "6fe0e3e8-4640-43e5-992e-718372bd92d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1758.771249] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Releasing lock "refresh_cache-95a289ac-3178-45ea-80d2-905b9af54f3c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1758.771462] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updated the network info_cache for instance {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1758.771936] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1758.772128] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1758.772287] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1758.772459] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1758.772610] env[62508]: DEBUG oslo_service.periodic_task [None 
req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1758.772770] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1758.854024] env[62508]: DEBUG nova.compute.manager [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Received event network-changed-65b1bfe7-bc3c-4538-9aab-563919b69ba3 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1758.854024] env[62508]: DEBUG nova.compute.manager [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Refreshing instance network info cache due to event network-changed-65b1bfe7-bc3c-4538-9aab-563919b69ba3. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1758.854024] env[62508]: DEBUG oslo_concurrency.lockutils [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] Acquiring lock "refresh_cache-5da47620-3979-44e8-91c5-154a1fe4ee48" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1758.854145] env[62508]: DEBUG oslo_concurrency.lockutils [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] Acquired lock "refresh_cache-5da47620-3979-44e8-91c5-154a1fe4ee48" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1758.854310] env[62508]: DEBUG nova.network.neutron [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Refreshing network info cache for port 65b1bfe7-bc3c-4538-9aab-563919b69ba3 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1758.959087] env[62508]: DEBUG oslo_concurrency.lockutils [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Releasing lock "refresh_cache-b74d8374-d5ae-456b-9e9e-ec09459a737b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1758.959322] env[62508]: DEBUG nova.compute.manager [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Instance network_info: |[{"id": "c262a662-8a1b-45b0-b251-df840455ae82", "address": "fa:16:3e:8c:13:f8", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc262a662-8a", "ovs_interfaceid": "c262a662-8a1b-45b0-b251-df840455ae82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1758.959697] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:13:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '304be4f7-4e36-4468-9ef4-e457341cef18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c262a662-8a1b-45b0-b251-df840455ae82', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1758.968460] env[62508]: DEBUG oslo.service.loopingcall [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1758.968689] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1758.968955] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-290f5b54-ab06-4c21-80b5-a6b8ee1e68a7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.990101] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1758.990101] env[62508]: value = "task-1776538" [ 1758.990101] env[62508]: _type = "Task" [ 1758.990101] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.997568] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776538, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.087066] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776536, 'name': CreateVM_Task, 'duration_secs': 0.35342} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.087245] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1759.087894] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1759.088087] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1759.088396] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1759.088640] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8afd5a5f-0fe0-4556-a2b3-091a7763c472 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.092950] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1759.092950] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e9028c-de4d-af4f-f749-27e93d49ae1e" [ 1759.092950] env[62508]: _type = "Task" [ 1759.092950] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.093870] env[62508]: DEBUG oslo_concurrency.lockutils [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.101272] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e9028c-de4d-af4f-f749-27e93d49ae1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.118069] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776537, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068633} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.118069] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1759.118735] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05162ca0-0c7b-4791-8b77-1d1357fb47e2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.134176] env[62508]: DEBUG oslo_concurrency.lockutils [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Releasing lock "refresh_cache-84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1759.134467] env[62508]: DEBUG nova.compute.manager [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Instance network_info: |[{"id": "6fe0e3e8-4640-43e5-992e-718372bd92d1", "address": "fa:16:3e:7b:6d:37", "network": {"id": "9442efbf-54d6-4a21-81ab-3e50e7f19b4c", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2050937254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdc04dc308e44668828caa29bfdbe122", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fe0e3e8-46", "ovs_interfaceid": "6fe0e3e8-4640-43e5-992e-718372bd92d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1759.143199] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] b83dd148-8cf6-474b-bb19-e0822732b12a/b83dd148-8cf6-474b-bb19-e0822732b12a.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1759.143565] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:6d:37', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': '23fc30ea-1f06-424d-86e1-27ae5435b1a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6fe0e3e8-4640-43e5-992e-718372bd92d1', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1759.150857] env[62508]: DEBUG oslo.service.loopingcall [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1759.151049] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9473b331-7ef6-40ef-bf53-c977480c7d23 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.165219] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1759.165451] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-33ec3fd4-dab8-489d-b5ab-74637ab0e439 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.185279] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1759.185279] env[62508]: value = "task-1776540" [ 1759.185279] env[62508]: _type = "Task" [ 1759.185279] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.186566] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1759.186566] env[62508]: value = "task-1776539" [ 1759.186566] env[62508]: _type = "Task" [ 1759.186566] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.195579] env[62508]: DEBUG nova.compute.manager [req-4e88f06b-ab4a-4dbd-a46a-48e5a1592b7c req-6d0b59eb-a9af-4092-b9b3-daf4a48c1b36 service nova] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Received event network-changed-c262a662-8a1b-45b0-b251-df840455ae82 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1759.195795] env[62508]: DEBUG nova.compute.manager [req-4e88f06b-ab4a-4dbd-a46a-48e5a1592b7c req-6d0b59eb-a9af-4092-b9b3-daf4a48c1b36 service nova] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Refreshing instance network info cache due to event network-changed-c262a662-8a1b-45b0-b251-df840455ae82. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1759.196072] env[62508]: DEBUG oslo_concurrency.lockutils [req-4e88f06b-ab4a-4dbd-a46a-48e5a1592b7c req-6d0b59eb-a9af-4092-b9b3-daf4a48c1b36 service nova] Acquiring lock "refresh_cache-b74d8374-d5ae-456b-9e9e-ec09459a737b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1759.196263] env[62508]: DEBUG oslo_concurrency.lockutils [req-4e88f06b-ab4a-4dbd-a46a-48e5a1592b7c req-6d0b59eb-a9af-4092-b9b3-daf4a48c1b36 service nova] Acquired lock "refresh_cache-b74d8374-d5ae-456b-9e9e-ec09459a737b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1759.196462] env[62508]: DEBUG nova.network.neutron [req-4e88f06b-ab4a-4dbd-a46a-48e5a1592b7c req-6d0b59eb-a9af-4092-b9b3-daf4a48c1b36 service nova] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Refreshing network info cache for port c262a662-8a1b-45b0-b251-df840455ae82 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1759.202599] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776540, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.206922] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776539, 'name': CreateVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.258940] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9ff30040-38b1-4875-8466-91af1a450f26 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.350s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.261814] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.214s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.262066] env[62508]: DEBUG nova.objects.instance [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lazy-loading 'resources' on Instance uuid f3e1c48d-9aaf-415f-8234-82a71bb469ee {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1759.282079] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Getting list of instances from cluster (obj){ [ 1759.282079] env[62508]: value = "domain-c8" [ 1759.282079] env[62508]: _type = "ClusterComputeResource" [ 1759.282079] env[62508]: } {{(pid=62508) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1759.283771] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496e2bb5-0c63-4323-96e8-5813be0aa4d0 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.312829] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Got total of 8 instances {{(pid=62508) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1759.312829] env[62508]: WARNING nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] While synchronizing instance power states, found 14 instances in the database and 8 instances on the hypervisor. [ 1759.313032] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Triggering sync for uuid de69dbf0-86f1-4b05-a9db-8b9afaabe49c {{(pid=62508) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1759.313230] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Triggering sync for uuid 95a289ac-3178-45ea-80d2-905b9af54f3c {{(pid=62508) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1759.313463] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Triggering sync for uuid 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a {{(pid=62508) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1759.313679] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Triggering sync for uuid a10a4217-ae46-4f00-9ba1-cdf74f44ec7b {{(pid=62508) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1759.313947] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Triggering sync for uuid aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a {{(pid=62508) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1759.314185] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Triggering sync for uuid 24091abb-f71f-4528-8fc5-b97725cf079e {{(pid=62508) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1759.314432] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Triggering sync for uuid 806102ec-7622-4770-91c9-8c5723893dec {{(pid=62508) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1759.314679] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Triggering sync for uuid f3e1c48d-9aaf-415f-8234-82a71bb469ee {{(pid=62508) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1759.314928] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Triggering sync for uuid b4427ba0-4dcf-4b21-a584-a7fee560f135 {{(pid=62508) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1759.315208] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Triggering sync for uuid 2aeb5a4c-785a-4238-8575-ecd1ff84b97c {{(pid=62508) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1759.315475] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Triggering sync for uuid b83dd148-8cf6-474b-bb19-e0822732b12a {{(pid=62508) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1759.315754] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Triggering sync for uuid 5da47620-3979-44e8-91c5-154a1fe4ee48 {{(pid=62508) _sync_power_states 
/opt/stack/nova/nova/compute/manager.py:10339}} [ 1759.316022] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Triggering sync for uuid 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3 {{(pid=62508) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1759.316213] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Triggering sync for uuid b74d8374-d5ae-456b-9e9e-ec09459a737b {{(pid=62508) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1759.317040] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "de69dbf0-86f1-4b05-a9db-8b9afaabe49c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.317429] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "de69dbf0-86f1-4b05-a9db-8b9afaabe49c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.317868] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "95a289ac-3178-45ea-80d2-905b9af54f3c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.318260] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.318584] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.318972] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "a10a4217-ae46-4f00-9ba1-cdf74f44ec7b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.319290] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "a10a4217-ae46-4f00-9ba1-cdf74f44ec7b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.319687] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62508) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.320059] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "24091abb-f71f-4528-8fc5-b97725cf079e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.320362] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "24091abb-f71f-4528-8fc5-b97725cf079e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.320742] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "806102ec-7622-4770-91c9-8c5723893dec" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.321090] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.321653] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "b4427ba0-4dcf-4b21-a584-a7fee560f135" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.321791] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.322087] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.322467] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "b83dd148-8cf6-474b-bb19-e0822732b12a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.322803] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "5da47620-3979-44e8-91c5-154a1fe4ee48" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1759.323161] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.323771] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "b74d8374-d5ae-456b-9e9e-ec09459a737b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.324093] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1759.324332] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1759.325538] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06162dd-51d3-4f5c-a389-7712f5fc1a82 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.332157] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ead489-7051-4687-8c35-9623361720ed {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.336724] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b98d03b-642b-478a-972c-e518be449b8f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.340866] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe5540c-6200-4cbb-941d-1cb74ee616f1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.345156] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ce995c-f39d-43b1-aeb1-dac746d21034 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.348417] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1759.504303] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776538, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.604922] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e9028c-de4d-af4f-f749-27e93d49ae1e, 'name': SearchDatastore_Task, 'duration_secs': 0.009299} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.605678] env[62508]: DEBUG nova.network.neutron [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Updated VIF entry in instance network info cache for port 65b1bfe7-bc3c-4538-9aab-563919b69ba3. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1759.606008] env[62508]: DEBUG nova.network.neutron [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Updating instance_info_cache with network_info: [{"id": "65b1bfe7-bc3c-4538-9aab-563919b69ba3", "address": "fa:16:3e:5b:96:d2", "network": {"id": "9442efbf-54d6-4a21-81ab-3e50e7f19b4c", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2050937254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdc04dc308e44668828caa29bfdbe122", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65b1bfe7-bc", "ovs_interfaceid": "65b1bfe7-bc3c-4538-9aab-563919b69ba3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1759.607231] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1759.607466] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1759.607695] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "[datastore1] 
devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1759.607843] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1759.608037] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1759.608508] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb0585dd-c988-4e9e-98b2-7cf992a3eca2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.618156] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1759.618418] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1759.619291] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9095ddb9-c244-4e5c-9244-fe3bb82e04dc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.625780] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1759.625780] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526eb3aa-92f3-6756-3ee7-a76effaaf935" [ 1759.625780] env[62508]: _type = "Task" [ 1759.625780] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.634810] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526eb3aa-92f3-6756-3ee7-a76effaaf935, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.704410] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776539, 'name': CreateVM_Task, 'duration_secs': 0.486497} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.704684] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776540, 'name': ReconfigVM_Task, 'duration_secs': 0.345799} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.704826] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1759.705130] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Reconfigured VM instance instance-0000005c to attach disk [datastore1] b83dd148-8cf6-474b-bb19-e0822732b12a/b83dd148-8cf6-474b-bb19-e0822732b12a.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1759.706434] env[62508]: DEBUG oslo_concurrency.lockutils [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1759.706622] env[62508]: DEBUG oslo_concurrency.lockutils [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1759.706958] env[62508]: DEBUG oslo_concurrency.lockutils [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1759.707212] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a6827f19-9f16-4762-8293-ed55a03a96ee {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.708715] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0d8a346-8d2d-420f-bc37-5acca07995cf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.714012] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1759.714012] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5243ba76-5450-a3dd-c614-6fc2339a6e08" [ 1759.714012] env[62508]: _type = "Task" [ 1759.714012] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.718654] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1759.718654] env[62508]: value = "task-1776541" [ 1759.718654] env[62508]: _type = "Task" [ 1759.718654] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.725548] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5243ba76-5450-a3dd-c614-6fc2339a6e08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.731623] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776541, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.822521] env[62508]: INFO nova.scheduler.client.report [None req-9ff30040-38b1-4875-8466-91af1a450f26 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Deleted allocation for migration e9f19fd5-c421-44e8-bc0f-0cbc96a8fa64 [ 1759.854617] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.879243] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.557s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.879526] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.561s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.880500] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "24091abb-f71f-4528-8fc5-b97725cf079e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.560s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.897019] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "a10a4217-ae46-4f00-9ba1-cdf74f44ec7b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.578s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.897400] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "de69dbf0-86f1-4b05-a9db-8b9afaabe49c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.580s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.002740] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776538, 'name': CreateVM_Task, 'duration_secs': 0.63861} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.006167] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1760.006561] env[62508]: DEBUG oslo_concurrency.lockutils [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1760.019988] env[62508]: DEBUG nova.network.neutron [req-4e88f06b-ab4a-4dbd-a46a-48e5a1592b7c req-6d0b59eb-a9af-4092-b9b3-daf4a48c1b36 service nova] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Updated VIF entry in instance network info cache for port c262a662-8a1b-45b0-b251-df840455ae82. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1760.020447] env[62508]: DEBUG nova.network.neutron [req-4e88f06b-ab4a-4dbd-a46a-48e5a1592b7c req-6d0b59eb-a9af-4092-b9b3-daf4a48c1b36 service nova] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Updating instance_info_cache with network_info: [{"id": "c262a662-8a1b-45b0-b251-df840455ae82", "address": "fa:16:3e:8c:13:f8", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc262a662-8a", "ovs_interfaceid": "c262a662-8a1b-45b0-b251-df840455ae82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1760.029401] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b95c51-fac8-43fc-a4dc-64a9eab7c20a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1760.038418] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ecbff9-5c3c-490f-8007-c31fd751db04 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.072194] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a25962-4066-49cd-acf8-3909d505a566 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.080541] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65e12f5-a6c5-4a06-8597-94a9f56e0e70 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.095474] env[62508]: DEBUG nova.compute.provider_tree [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1760.109185] env[62508]: DEBUG oslo_concurrency.lockutils [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] Releasing lock "refresh_cache-5da47620-3979-44e8-91c5-154a1fe4ee48" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1760.109407] env[62508]: DEBUG nova.compute.manager [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Received event network-vif-plugged-6fe0e3e8-4640-43e5-992e-718372bd92d1 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1760.109656] env[62508]: DEBUG oslo_concurrency.lockutils [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] Acquiring lock "84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.109880] env[62508]: DEBUG oslo_concurrency.lockutils [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] Lock "84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.110090] env[62508]: DEBUG oslo_concurrency.lockutils [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] Lock "84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.110271] env[62508]: DEBUG nova.compute.manager [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] No waiting events found dispatching network-vif-plugged-6fe0e3e8-4640-43e5-992e-718372bd92d1 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1760.110495] env[62508]: WARNING nova.compute.manager 
[req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Received unexpected event network-vif-plugged-6fe0e3e8-4640-43e5-992e-718372bd92d1 for instance with vm_state building and task_state spawning. [ 1760.110604] env[62508]: DEBUG nova.compute.manager [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Received event network-changed-6fe0e3e8-4640-43e5-992e-718372bd92d1 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1760.110761] env[62508]: DEBUG nova.compute.manager [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Refreshing instance network info cache due to event network-changed-6fe0e3e8-4640-43e5-992e-718372bd92d1. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1760.110960] env[62508]: DEBUG oslo_concurrency.lockutils [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] Acquiring lock "refresh_cache-84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1760.111100] env[62508]: DEBUG oslo_concurrency.lockutils [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] Acquired lock "refresh_cache-84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1760.111265] env[62508]: DEBUG nova.network.neutron [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Refreshing network info cache for port 6fe0e3e8-4640-43e5-992e-718372bd92d1 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1760.136568] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526eb3aa-92f3-6756-3ee7-a76effaaf935, 'name': SearchDatastore_Task, 'duration_secs': 0.010713} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.137464] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae9038ad-7a17-4253-96f9-97cc918dd4ec {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.143738] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1760.143738] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520e0096-ebca-a7e6-d8bf-ad801cf5fabb" [ 1760.143738] env[62508]: _type = "Task" [ 1760.143738] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.152470] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520e0096-ebca-a7e6-d8bf-ad801cf5fabb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.226171] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5243ba76-5450-a3dd-c614-6fc2339a6e08, 'name': SearchDatastore_Task, 'duration_secs': 0.010591} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.226835] env[62508]: DEBUG oslo_concurrency.lockutils [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1760.227104] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1760.227322] env[62508]: DEBUG oslo_concurrency.lockutils [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1760.227551] env[62508]: DEBUG oslo_concurrency.lockutils [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1760.227862] env[62508]: DEBUG oslo_concurrency.lockutils [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1760.228135] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b27241fe-ccf6-4452-8555-2946ce5ab430 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.232756] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 
tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776541, 'name': Rename_Task, 'duration_secs': 0.147433} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.233284] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1760.233527] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91d108e5-cf27-479e-8cd0-9414674b7778 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.235973] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1760.235973] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e9d87f-d9fc-f48f-ad12-c1874ebac1a7" [ 1760.235973] env[62508]: _type = "Task" [ 1760.235973] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.241590] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1760.241590] env[62508]: value = "task-1776542" [ 1760.241590] env[62508]: _type = "Task" [ 1760.241590] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.247415] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e9d87f-d9fc-f48f-ad12-c1874ebac1a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.252625] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776542, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.335403] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9ff30040-38b1-4875-8466-91af1a450f26 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "95a289ac-3178-45ea-80d2-905b9af54f3c" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 18.475s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.337252] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "95a289ac-3178-45ea-80d2-905b9af54f3c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 1.019s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.338438] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb5bc13-f3cc-41bb-b26c-b61483c87cff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.523291] env[62508]: DEBUG oslo_concurrency.lockutils [req-4e88f06b-ab4a-4dbd-a46a-48e5a1592b7c req-6d0b59eb-a9af-4092-b9b3-daf4a48c1b36 service nova] Releasing lock "refresh_cache-b74d8374-d5ae-456b-9e9e-ec09459a737b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1760.598619] env[62508]: DEBUG nova.scheduler.client.report [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1760.655267] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520e0096-ebca-a7e6-d8bf-ad801cf5fabb, 'name': SearchDatastore_Task, 'duration_secs': 0.010458} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.655581] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1760.656190] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 5da47620-3979-44e8-91c5-154a1fe4ee48/5da47620-3979-44e8-91c5-154a1fe4ee48.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1760.656190] env[62508]: DEBUG oslo_concurrency.lockutils [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1760.656349] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1760.656573] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e2a07051-b8d4-4129-a77b-debcbd08bbe5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.659044] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-248e8588-b6db-44f2-a718-cde9e7954da8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.667716] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1760.667716] env[62508]: value = "task-1776543" [ 1760.667716] env[62508]: _type = "Task" [ 1760.667716] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.668984] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1760.669179] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1760.672695] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c40232f-8deb-418b-b96a-34ca5605644f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.681147] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776543, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.682491] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1760.682491] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5210d654-4953-e006-4acf-c3163d0989a8" [ 1760.682491] env[62508]: _type = "Task" [ 1760.682491] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.692487] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5210d654-4953-e006-4acf-c3163d0989a8, 'name': SearchDatastore_Task, 'duration_secs': 0.009012} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.693712] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b038c909-8933-4d5f-b5bc-7105dbab7b8f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.703178] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1760.703178] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523f7c40-47ae-c493-3544-85ab2433914e" [ 1760.703178] env[62508]: _type = "Task" [ 1760.703178] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.715073] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523f7c40-47ae-c493-3544-85ab2433914e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.750191] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e9d87f-d9fc-f48f-ad12-c1874ebac1a7, 'name': SearchDatastore_Task, 'duration_secs': 0.009923} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.751015] env[62508]: DEBUG oslo_concurrency.lockutils [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1760.751326] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1760.751579] env[62508]: DEBUG oslo_concurrency.lockutils [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1760.756629] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776542, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.850343] env[62508]: INFO nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] During sync_power_state the instance has a pending task (deleting). Skip. 
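A minimal sketch of the lock-then-skip pattern visible in the entries above, where the periodic power-state sync takes a per-instance lock and backs off because the instance still has a pending task (deleting). It assumes oslo.concurrency's lockutils; query_driver_power_state_and_sync and sync_fn are illustrative stand-ins, not Nova's actual implementation.

    from oslo_concurrency import lockutils

    def query_driver_power_state_and_sync(instance_uuid, task_state, sync_fn):
        # Take a per-instance lock, mirroring the acquire/release pairs in the log.
        with lockutils.lock(instance_uuid):
            if task_state is not None:
                # A pending task (e.g. 'deleting' or 'spawning') means another
                # operation owns the instance right now, so the sync is skipped.
                print("During sync_power_state the instance has a pending "
                      "task (%s). Skip." % task_state)
                return
            # Otherwise reconcile the hypervisor power state with the DB record.
            sync_fn(instance_uuid)
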
[ 1760.850572] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "95a289ac-3178-45ea-80d2-905b9af54f3c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.514s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.870298] env[62508]: DEBUG oslo_concurrency.lockutils [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "95a289ac-3178-45ea-80d2-905b9af54f3c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.870436] env[62508]: DEBUG oslo_concurrency.lockutils [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "95a289ac-3178-45ea-80d2-905b9af54f3c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.870668] env[62508]: DEBUG oslo_concurrency.lockutils [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "95a289ac-3178-45ea-80d2-905b9af54f3c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.870856] env[62508]: DEBUG oslo_concurrency.lockutils [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "95a289ac-3178-45ea-80d2-905b9af54f3c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.871057] env[62508]: DEBUG oslo_concurrency.lockutils [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "95a289ac-3178-45ea-80d2-905b9af54f3c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.873819] env[62508]: INFO nova.compute.manager [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Terminating instance [ 1760.875984] env[62508]: DEBUG nova.compute.manager [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1760.876226] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1760.877118] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cbadae3-383b-4826-8fc3-ed196608bc37 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.887988] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1760.888305] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c95a16ae-eda6-4c38-98ca-c02848027662 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.897686] env[62508]: DEBUG oslo_vmware.api [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1760.897686] env[62508]: value = "task-1776544" [ 1760.897686] env[62508]: _type = "Task" [ 1760.897686] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.908312] env[62508]: DEBUG oslo_vmware.api [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776544, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.935241] env[62508]: DEBUG nova.network.neutron [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Updated VIF entry in instance network info cache for port 6fe0e3e8-4640-43e5-992e-718372bd92d1. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1760.935759] env[62508]: DEBUG nova.network.neutron [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Updating instance_info_cache with network_info: [{"id": "6fe0e3e8-4640-43e5-992e-718372bd92d1", "address": "fa:16:3e:7b:6d:37", "network": {"id": "9442efbf-54d6-4a21-81ab-3e50e7f19b4c", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2050937254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdc04dc308e44668828caa29bfdbe122", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fe0e3e8-46", "ovs_interfaceid": "6fe0e3e8-4640-43e5-992e-718372bd92d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1761.104071] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.842s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1761.107492] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.976s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.107811] env[62508]: DEBUG nova.objects.instance [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lazy-loading 'resources' on Instance uuid b4427ba0-4dcf-4b21-a584-a7fee560f135 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1761.131665] env[62508]: INFO nova.scheduler.client.report [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Deleted allocations for instance f3e1c48d-9aaf-415f-8234-82a71bb469ee [ 1761.180792] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776543, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464652} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.181178] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 5da47620-3979-44e8-91c5-154a1fe4ee48/5da47620-3979-44e8-91c5-154a1fe4ee48.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1761.181445] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1761.181660] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-68200acf-5340-4709-af8c-d8d83b5700b7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.191327] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1761.191327] env[62508]: value = "task-1776545" [ 1761.191327] env[62508]: _type = "Task" [ 1761.191327] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.204262] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776545, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.213534] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523f7c40-47ae-c493-3544-85ab2433914e, 'name': SearchDatastore_Task, 'duration_secs': 0.008972} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.213798] env[62508]: DEBUG oslo_concurrency.lockutils [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1761.214067] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3/84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1761.214478] env[62508]: DEBUG oslo_concurrency.lockutils [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1761.214678] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1761.214962] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-415bd843-f2a6-40c1-9272-f552c2f23e0f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.217010] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0070fae8-6d36-4d6b-90a4-f4769a57fb13 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.225547] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1761.225547] env[62508]: value = "task-1776546" [ 1761.225547] env[62508]: _type = "Task" [ 1761.225547] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.226725] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1761.226909] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1761.230560] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4c03d7c-cb06-43b7-823f-4054ca7a1b80 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.237231] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1761.237231] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d8469d-0207-34a1-2848-bbb69cd69149" [ 1761.237231] env[62508]: _type = "Task" [ 1761.237231] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.240774] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776546, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.252889] env[62508]: DEBUG oslo_vmware.api [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776542, 'name': PowerOnVM_Task, 'duration_secs': 0.70064} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.256537] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1761.256828] env[62508]: INFO nova.compute.manager [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Took 13.29 seconds to spawn the instance on the hypervisor. [ 1761.257059] env[62508]: DEBUG nova.compute.manager [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1761.257347] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d8469d-0207-34a1-2848-bbb69cd69149, 'name': SearchDatastore_Task, 'duration_secs': 0.009931} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.258041] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188b4e19-9ba5-492d-bfd1-8aaad890b519 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.260997] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16c03e21-cf3d-44ee-bca8-69cbe825a5cb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.277252] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1761.277252] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5266789e-2b54-cc3d-312f-838cdf62822a" [ 1761.277252] env[62508]: _type = "Task" [ 1761.277252] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.291084] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5266789e-2b54-cc3d-312f-838cdf62822a, 'name': SearchDatastore_Task, 'duration_secs': 0.015194} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.291480] env[62508]: DEBUG oslo_concurrency.lockutils [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1761.291860] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] b74d8374-d5ae-456b-9e9e-ec09459a737b/b74d8374-d5ae-456b-9e9e-ec09459a737b.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1761.292207] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7aa8d9e-97e4-4fda-986c-b004f70fcda8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.301817] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1761.301817] env[62508]: value = "task-1776547" [ 1761.301817] env[62508]: _type = "Task" [ 1761.301817] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.311454] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776547, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.412251] env[62508]: DEBUG oslo_vmware.api [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776544, 'name': PowerOffVM_Task, 'duration_secs': 0.336662} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.412616] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1761.412877] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1761.413258] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-de971c6f-56d3-4594-b786-49974f701947 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.439214] env[62508]: DEBUG oslo_concurrency.lockutils [req-63562f33-b821-406f-ab91-8760a62b1e7c req-939b78b5-b471-4d2a-8a2f-efa76b1b24de service nova] Releasing lock "refresh_cache-84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1761.645210] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6be60e5c-777a-4308-ae1c-a865762a22d1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.338s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1761.646977] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 2.326s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.647942] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cb461691-0fb7-423a-8a85-617cfda3fc0e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.668666] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3593ddbf-5bf2-4abc-bb81-1782996490a9 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.692044] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1761.692472] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1761.692964] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Deleting the datastore file [datastore1] 95a289ac-3178-45ea-80d2-905b9af54f3c {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1761.694110] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eefad9f1-8aa1-4391-9d28-32e53cdb8a82 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.728670] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776545, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.149969} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.729017] env[62508]: DEBUG oslo_vmware.api [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1761.729017] env[62508]: value = "task-1776549" [ 1761.729017] env[62508]: _type = "Task" [ 1761.729017] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.732974] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1761.735179] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2996610f-cf0d-4daa-8d8f-6a3342e726c1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.757195] env[62508]: DEBUG oslo_vmware.api [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776549, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.772812] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776546, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.782822] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 5da47620-3979-44e8-91c5-154a1fe4ee48/5da47620-3979-44e8-91c5-154a1fe4ee48.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1761.791978] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-323636d8-c902-442f-9c42-52a966e69bb8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.820807] env[62508]: INFO nova.compute.manager [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Took 24.41 seconds to build instance. [ 1761.833439] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776547, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.835142] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1761.835142] env[62508]: value = "task-1776550" [ 1761.835142] env[62508]: _type = "Task" [ 1761.835142] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.848090] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776550, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.995042] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf292ee2-66a0-48b1-bbd7-3b6718a55c46 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.006047] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e8d599-4c59-4002-9f35-70910875b7aa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.040691] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ec068c-cc8d-481c-8123-5de591dec8fe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.049539] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75df1f27-b098-4c48-8854-a7b48a50ec5a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.072938] env[62508]: DEBUG nova.compute.provider_tree [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1762.222161] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "f3e1c48d-9aaf-415f-8234-82a71bb469ee" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.575s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.242157] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776546, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.64702} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.242259] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3/84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1762.242465] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1762.242767] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b7b847ee-fc44-4793-9789-8cd2fd8b82d9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.247877] env[62508]: DEBUG oslo_vmware.api [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776549, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.254359] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1762.254359] env[62508]: value = "task-1776551" [ 1762.254359] env[62508]: _type = "Task" [ 1762.254359] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.263497] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776551, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.325815] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91928295-e9a9-4a51-a308-e897958e9900 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "b83dd148-8cf6-474b-bb19-e0822732b12a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.939s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.326255] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "b83dd148-8cf6-474b-bb19-e0822732b12a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.004s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.326598] env[62508]: INFO nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] During sync_power_state the instance has a pending task (spawning). Skip. [ 1762.326890] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "b83dd148-8cf6-474b-bb19-e0822732b12a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.330972] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776547, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.904864} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.331286] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] b74d8374-d5ae-456b-9e9e-ec09459a737b/b74d8374-d5ae-456b-9e9e-ec09459a737b.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1762.331539] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1762.331843] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b03e86fa-c06e-4ade-a4cb-fc073fd5cf96 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.341170] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1762.341170] env[62508]: value = "task-1776552" [ 1762.341170] env[62508]: _type = "Task" [ 1762.341170] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.349091] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776550, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.354755] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776552, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.576992] env[62508]: DEBUG nova.scheduler.client.report [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1762.745382] env[62508]: DEBUG oslo_vmware.api [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776549, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.569097} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.745743] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1762.746012] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1762.746263] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1762.746509] env[62508]: INFO nova.compute.manager [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Took 1.87 seconds to destroy the instance on the hypervisor. [ 1762.746869] env[62508]: DEBUG oslo.service.loopingcall [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1762.747167] env[62508]: DEBUG nova.compute.manager [-] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1762.747313] env[62508]: DEBUG nova.network.neutron [-] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1762.765655] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776551, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079403} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.765964] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1762.767063] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5886f701-a725-4ade-ab52-aee91a06175c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.799193] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3/84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1762.799487] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85c92b8a-369c-4398-ba0f-41f53cf66167 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.820122] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1762.820122] env[62508]: value = "task-1776553" [ 1762.820122] env[62508]: _type = "Task" [ 1762.820122] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.830122] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776553, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.852428] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776550, 'name': ReconfigVM_Task, 'duration_secs': 0.787509} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.855160] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 5da47620-3979-44e8-91c5-154a1fe4ee48/5da47620-3979-44e8-91c5-154a1fe4ee48.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1762.856534] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776552, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.195808} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.856534] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-37aca028-d86c-46c7-a5b9-e65787fa13eb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.857613] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1762.858359] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8394865c-97b9-4e4d-9c64-e2dd7e1740aa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.882757] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] b74d8374-d5ae-456b-9e9e-ec09459a737b/b74d8374-d5ae-456b-9e9e-ec09459a737b.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1762.884228] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b9bdb9f-8d85-4655-9c5d-e8980bb77916 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.900367] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1762.900367] env[62508]: value = "task-1776554" [ 1762.900367] env[62508]: _type = "Task" [ 1762.900367] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.906725] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1762.906725] env[62508]: value = "task-1776555" [ 1762.906725] env[62508]: _type = "Task" [ 1762.906725] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.910429] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776554, 'name': Rename_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.919628] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776555, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.082851] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.975s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.085682] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.036s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.085939] env[62508]: DEBUG nova.objects.instance [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lazy-loading 'pci_requests' on Instance uuid a8ce13c4-ea95-4343-8eab-8a0dafbf0e03 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1763.110906] env[62508]: INFO nova.scheduler.client.report [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Deleted allocations for instance b4427ba0-4dcf-4b21-a584-a7fee560f135 [ 1763.332203] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776553, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.407282] env[62508]: DEBUG nova.compute.manager [req-27b45e09-a470-4501-b084-e810ebbeeafa req-22bdde70-5f02-4240-b3cb-308a6e2814f6 service nova] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Received event network-vif-deleted-d9e88907-9194-4d90-87ab-d1b87ef0e48e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1763.407282] env[62508]: INFO nova.compute.manager [req-27b45e09-a470-4501-b084-e810ebbeeafa req-22bdde70-5f02-4240-b3cb-308a6e2814f6 service nova] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Neutron deleted interface d9e88907-9194-4d90-87ab-d1b87ef0e48e; detaching it from the instance and deleting it from the info cache [ 1763.407647] env[62508]: DEBUG nova.network.neutron [req-27b45e09-a470-4501-b084-e810ebbeeafa req-22bdde70-5f02-4240-b3cb-308a6e2814f6 service nova] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1763.416830] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776554, 'name': Rename_Task, 'duration_secs': 0.482578} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.417900] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1763.418163] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f955c9c2-5616-4340-bcfe-b8c3cd3294f0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.423662] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776555, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.429024] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1763.429024] env[62508]: value = "task-1776556" [ 1763.429024] env[62508]: _type = "Task" [ 1763.429024] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.437652] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776556, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.590248] env[62508]: DEBUG nova.objects.instance [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lazy-loading 'numa_topology' on Instance uuid a8ce13c4-ea95-4343-8eab-8a0dafbf0e03 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1763.621530] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cae1b5b0-0acd-4a87-a792-f5aad565f025 tempest-MultipleCreateTestJSON-1212467534 tempest-MultipleCreateTestJSON-1212467534-project-member] Lock "b4427ba0-4dcf-4b21-a584-a7fee560f135" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.007s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.622530] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "b4427ba0-4dcf-4b21-a584-a7fee560f135" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 4.301s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.623033] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e4296639-9acf-41cc-9c22-4c4e54de250a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.634188] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3244d33-9c1a-4602-8c18-8f2f03e1c3be {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.685182] env[62508]: DEBUG oslo_concurrency.lockutils [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "a15f3cef-c260-4a54-83af-7cccf81e15a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.685419] env[62508]: DEBUG oslo_concurrency.lockutils [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "a15f3cef-c260-4a54-83af-7cccf81e15a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.832346] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776553, 'name': ReconfigVM_Task, 'duration_secs': 0.954303} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.832679] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3/84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1763.833259] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3e495ed6-aef3-40b0-8189-a735d5cb08ab {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.840946] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1763.840946] env[62508]: value = "task-1776557" [ 1763.840946] env[62508]: _type = "Task" [ 1763.840946] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.850066] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776557, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.876733] env[62508]: DEBUG nova.network.neutron [-] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1763.910445] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dbcc2dbe-e923-49f9-84a0-b3f656055a95 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.925917] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776555, 'name': ReconfigVM_Task, 'duration_secs': 0.878617} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.927666] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Reconfigured VM instance instance-0000005f to attach disk [datastore1] b74d8374-d5ae-456b-9e9e-ec09459a737b/b74d8374-d5ae-456b-9e9e-ec09459a737b.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1763.928610] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2b0344f-b3c9-438b-af04-6b6cd90688fe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.932989] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d678df94-0bb8-467d-b8b6-809d0b86e8f3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.954483] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776556, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.956013] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1763.956013] env[62508]: value = "task-1776558" [ 1763.956013] env[62508]: _type = "Task" [ 1763.956013] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.965171] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776558, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.975933] env[62508]: DEBUG nova.compute.manager [req-27b45e09-a470-4501-b084-e810ebbeeafa req-22bdde70-5f02-4240-b3cb-308a6e2814f6 service nova] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Detach interface failed, port_id=d9e88907-9194-4d90-87ab-d1b87ef0e48e, reason: Instance 95a289ac-3178-45ea-80d2-905b9af54f3c could not be found. 
{{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1764.093577] env[62508]: INFO nova.compute.claims [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1764.175423] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "b4427ba0-4dcf-4b21-a584-a7fee560f135" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.553s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.188072] env[62508]: DEBUG nova.compute.manager [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1764.355749] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776557, 'name': Rename_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.379728] env[62508]: INFO nova.compute.manager [-] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Took 1.63 seconds to deallocate network for instance. [ 1764.453237] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776556, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.464773] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776558, 'name': Rename_Task, 'duration_secs': 0.399269} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.466522] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1764.466522] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c7ec95a5-44fb-4789-b9b8-ac0b98735f78 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.472090] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1764.472090] env[62508]: value = "task-1776559" [ 1764.472090] env[62508]: _type = "Task" [ 1764.472090] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.480516] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776559, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.718353] env[62508]: DEBUG oslo_concurrency.lockutils [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.853311] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776557, 'name': Rename_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.889269] env[62508]: DEBUG oslo_concurrency.lockutils [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.955333] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776556, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.982487] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776559, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.354230] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776557, 'name': Rename_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.362404] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75b2782-8129-42b7-881c-68fbe31adebc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.370759] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d3d582-f1d6-4a5e-9c53-0d18031e46a4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.407956] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-723bd0db-f7f8-4d63-a567-e5a8254e1379 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.417252] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-102972b5-989a-4fae-b56e-36d2b652dbbe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.433624] env[62508]: DEBUG nova.compute.provider_tree [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1765.462800] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776556, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.482474] env[62508]: DEBUG oslo_vmware.api [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776559, 'name': PowerOnVM_Task, 'duration_secs': 0.78883} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.482750] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1765.483051] env[62508]: INFO nova.compute.manager [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Took 10.29 seconds to spawn the instance on the hypervisor. 
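Editor's note: the VirtualDiskManager/ReconfigVM/PowerOnVM activity above all follows the same shape: invoke a vCenter *_Task method, then poll it ("Waiting for the task ... to complete", "Task ... progress is N%") until it reports success. In Nova this is handled by oslo.vmware's wait_for_task; the sketch below only illustrates the polling loop in isolation, with a hypothetical get_task_info() stand-in instead of the real vSphere bindings.

import time

POLL_INTERVAL = 0.5   # seconds; illustrative cadence, not oslo.vmware's exact value

def wait_for_task(get_task_info, timeout: float = 300.0):
    """Poll a vCenter-style task until it finishes.

    get_task_info is a hypothetical callable returning an object with
    .state ('queued' | 'running' | 'success' | 'error'), .progress and .error,
    mirroring the TaskInfo the log entries above are reporting on.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"task failed: {info.error}")
        # 'queued' / 'running': same "progress is N%" lines as in the log.
        print(f"progress is {info.progress or 0}%")
        time.sleep(POLL_INTERVAL)
    raise TimeoutError("task did not complete in time")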
[ 1765.483373] env[62508]: DEBUG nova.compute.manager [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1765.484148] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7858ff9e-9248-4af6-869e-dcda9e2f0e42 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.853870] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776557, 'name': Rename_Task, 'duration_secs': 1.625615} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.854819] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1765.854819] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d152cb23-f0e7-47bc-b3a1-36f3f04bc06a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.861468] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1765.861468] env[62508]: value = "task-1776560" [ 1765.861468] env[62508]: _type = "Task" [ 1765.861468] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.869830] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776560, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.957165] env[62508]: DEBUG oslo_vmware.api [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776556, 'name': PowerOnVM_Task, 'duration_secs': 2.120547} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.958390] env[62508]: ERROR nova.scheduler.client.report [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [req-c50b8291-6c00-4032-b55d-bda543ddd63b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c50b8291-6c00-4032-b55d-bda543ddd63b"}]} [ 1765.958774] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1765.959059] env[62508]: INFO nova.compute.manager [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Took 15.62 seconds to spawn the instance on the hypervisor. [ 1765.959401] env[62508]: DEBUG nova.compute.manager [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1765.964031] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3eced90-09c0-4cd1-a292-0c1f9bea7803 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.975377] env[62508]: DEBUG nova.scheduler.client.report [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1765.991256] env[62508]: DEBUG nova.scheduler.client.report [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 
1765.991256] env[62508]: DEBUG nova.compute.provider_tree [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1766.004396] env[62508]: DEBUG nova.scheduler.client.report [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1766.007031] env[62508]: INFO nova.compute.manager [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Took 24.41 seconds to build instance. [ 1766.025803] env[62508]: DEBUG nova.scheduler.client.report [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1766.207556] env[62508]: DEBUG nova.compute.manager [req-4cef93aa-7d2b-4b8c-b1f5-6b3215c08712 req-bb603cf5-648c-4ad8-a73d-a8454a504739 service nova] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Received event network-changed-c262a662-8a1b-45b0-b251-df840455ae82 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1766.207723] env[62508]: DEBUG nova.compute.manager [req-4cef93aa-7d2b-4b8c-b1f5-6b3215c08712 req-bb603cf5-648c-4ad8-a73d-a8454a504739 service nova] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Refreshing instance network info cache due to event network-changed-c262a662-8a1b-45b0-b251-df840455ae82. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1766.207944] env[62508]: DEBUG oslo_concurrency.lockutils [req-4cef93aa-7d2b-4b8c-b1f5-6b3215c08712 req-bb603cf5-648c-4ad8-a73d-a8454a504739 service nova] Acquiring lock "refresh_cache-b74d8374-d5ae-456b-9e9e-ec09459a737b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1766.208061] env[62508]: DEBUG oslo_concurrency.lockutils [req-4cef93aa-7d2b-4b8c-b1f5-6b3215c08712 req-bb603cf5-648c-4ad8-a73d-a8454a504739 service nova] Acquired lock "refresh_cache-b74d8374-d5ae-456b-9e9e-ec09459a737b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1766.208227] env[62508]: DEBUG nova.network.neutron [req-4cef93aa-7d2b-4b8c-b1f5-6b3215c08712 req-bb603cf5-648c-4ad8-a73d-a8454a504739 service nova] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Refreshing network info cache for port c262a662-8a1b-45b0-b251-df840455ae82 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1766.255319] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb29d461-0c2c-4196-8377-5cf03178f125 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.265857] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53696caf-0cb8-4097-b7dd-d04a813b4d12 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.298329] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817951e5-794b-4eda-9bd1-0ef624151dc6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.307109] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34a8c9c0-ecc0-4162-8c42-c3d15739f3ee {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.322835] env[62508]: DEBUG nova.compute.provider_tree [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1766.372322] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776560, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.483047] env[62508]: INFO nova.compute.manager [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Took 28.41 seconds to build instance. [ 1766.508919] env[62508]: DEBUG oslo_concurrency.lockutils [None req-585a30c2-a1ba-4d1e-914c-3ba7d4cae032 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "b74d8374-d5ae-456b-9e9e-ec09459a737b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.926s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.509546] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "b74d8374-d5ae-456b-9e9e-ec09459a737b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 7.186s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.509770] env[62508]: INFO nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] During sync_power_state the instance has a pending task (spawning). Skip. [ 1766.509980] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "b74d8374-d5ae-456b-9e9e-ec09459a737b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.873909] env[62508]: DEBUG oslo_vmware.api [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776560, 'name': PowerOnVM_Task, 'duration_secs': 0.910413} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.875040] env[62508]: DEBUG nova.scheduler.client.report [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 151 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1766.875316] env[62508]: DEBUG nova.compute.provider_tree [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 151 to 152 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1766.875504] env[62508]: DEBUG nova.compute.provider_tree [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1766.879182] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1766.879420] env[62508]: INFO nova.compute.manager [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Took 14.11 seconds to spawn the instance on the hypervisor. 
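Editor's note: the entries above show set_inventory_for_provider hitting a 409 with code placement.concurrent_update, refreshing the provider's inventory (and with it the generation), and then succeeding as the generation moves from 151 to 152. A minimal sketch of that retry pattern against the Placement REST API follows, assuming a pre-authenticated requests.Session and a reachable placement endpoint (both hypothetical here; Nova itself goes through its scheduler report client rather than a helper like this).

import requests

PLACEMENT = "https://placement.example.test"            # assumed endpoint
HEADERS = {"OpenStack-API-Version": "placement 1.26"}    # microversion, illustrative

def put_inventories_with_retry(session: requests.Session, rp_uuid: str,
                               inventories: dict, max_attempts: int = 3) -> dict:
    """PUT the full inventory set, retrying when Placement reports a
    concurrent update (generation conflict), as seen in the log above."""
    url = f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories"
    for _ in range(max_attempts):
        # Re-read the provider generation before every attempt.
        current = session.get(url, headers=HEADERS)
        current.raise_for_status()
        generation = current.json()["resource_provider_generation"]

        body = {"resource_provider_generation": generation,
                "inventories": inventories}
        resp = session.put(url, json=body, headers=HEADERS)
        if resp.status_code == 200:
            return resp.json()      # includes the bumped generation
        if resp.status_code == 409 and "placement.concurrent_update" in resp.text:
            continue                # another writer bumped the generation; refresh and retry
        resp.raise_for_status()
    raise RuntimeError(f"inventory update for {rp_uuid} kept conflicting")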
[ 1766.879771] env[62508]: DEBUG nova.compute.manager [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1766.883524] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aff934a-384f-44a0-af5a-76db0abb004c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.962277] env[62508]: DEBUG nova.network.neutron [req-4cef93aa-7d2b-4b8c-b1f5-6b3215c08712 req-bb603cf5-648c-4ad8-a73d-a8454a504739 service nova] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Updated VIF entry in instance network info cache for port c262a662-8a1b-45b0-b251-df840455ae82. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1766.962578] env[62508]: DEBUG nova.network.neutron [req-4cef93aa-7d2b-4b8c-b1f5-6b3215c08712 req-bb603cf5-648c-4ad8-a73d-a8454a504739 service nova] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Updating instance_info_cache with network_info: [{"id": "c262a662-8a1b-45b0-b251-df840455ae82", "address": "fa:16:3e:8c:13:f8", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc262a662-8a", "ovs_interfaceid": "c262a662-8a1b-45b0-b251-df840455ae82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1766.985340] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a1d75a21-5d57-4756-8052-bac0940cbbb8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "5da47620-3979-44e8-91c5-154a1fe4ee48" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.921s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.985505] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "5da47620-3979-44e8-91c5-154a1fe4ee48" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 7.663s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.985922] env[62508]: INFO nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 
5da47620-3979-44e8-91c5-154a1fe4ee48] During sync_power_state the instance has a pending task (spawning). Skip. [ 1766.985922] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "5da47620-3979-44e8-91c5-154a1fe4ee48" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.384598] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.299s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.386784] env[62508]: DEBUG oslo_concurrency.lockutils [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.986s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.387100] env[62508]: DEBUG nova.objects.instance [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lazy-loading 'resources' on Instance uuid aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1767.401170] env[62508]: INFO nova.compute.manager [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Took 29.00 seconds to build instance. 
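Editor's note: the "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" triples that recur throughout this section (for "compute_resources", per-instance UUIDs, the *-events locks) are emitted by oslo.concurrency's lockutils wrappers. A minimal sketch of the same pattern, assuming only oslo.concurrency is installed; the function bodies and lock names are illustrative, not Nova's actual code.

from oslo_concurrency import lockutils

# Decorator form: serializes resource-tracker style updates on one named lock,
# producing the "acquired ... waited" / "released ... held" debug lines above.
@lockutils.synchronized("compute_resources")
def update_usage(instance_uuid: str) -> None:
    # ... adjust claimed resources for instance_uuid ...
    pass

# Context-manager form: the per-instance locks in the log are taken the same way.
def terminate(instance_uuid: str) -> None:
    with lockutils.lock(instance_uuid):
        # ... destroy the instance while holding its lock ...
        pass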
[ 1767.426714] env[62508]: INFO nova.network.neutron [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Updating port a51ee93a-fba9-4802-9791-4c16f273346e with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1767.465719] env[62508]: DEBUG oslo_concurrency.lockutils [req-4cef93aa-7d2b-4b8c-b1f5-6b3215c08712 req-bb603cf5-648c-4ad8-a73d-a8454a504739 service nova] Releasing lock "refresh_cache-b74d8374-d5ae-456b-9e9e-ec09459a737b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1767.903844] env[62508]: DEBUG oslo_concurrency.lockutils [None req-373dab91-5333-4ab3-b5c5-dbf362ec3268 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.509s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.904235] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 8.581s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.904348] env[62508]: INFO nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] During sync_power_state the instance has a pending task (spawning). Skip. 
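Editor's note: the "_sync_power_states ... During sync_power_state the instance has a pending task (spawning). Skip." entries show the periodic power-state sync deliberately backing off while another operation owns the instance. A simplified sketch of that guard, using illustrative names rather than Nova's actual helpers:

def maybe_sync_power_state(instance, driver_power_state) -> None:
    """Skip the sync while the instance has a pending task, as the log shows."""
    if instance.task_state is not None:
        # e.g. 'spawning': another code path is mid-operation; syncing now
        # would race with it, so just log and return.
        print(f"During sync_power_state the instance has a pending task "
              f"({instance.task_state}). Skip.")
        return
    if instance.power_state != driver_power_state:
        instance.power_state = driver_power_state
        instance.save()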
[ 1767.904518] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1768.097447] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6365db8a-96b9-4b8e-85ea-4b320c789583 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.106434] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f25c3e-6285-4db6-a7b2-00da9b9dfcfc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.138299] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59bad0f8-4d2e-4aa2-8752-e5c388b977ee {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.147284] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6699eb3f-4a64-4540-98e0-406a395a7067 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.161958] env[62508]: DEBUG nova.compute.provider_tree [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1768.665725] env[62508]: DEBUG nova.scheduler.client.report [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1768.962163] env[62508]: DEBUG nova.compute.manager [req-9ec2123d-aaaf-47e9-94f7-fce437469f5f req-bc00d785-6826-44c4-8442-23dc7290874e service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Received event network-vif-plugged-a51ee93a-fba9-4802-9791-4c16f273346e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1768.962390] env[62508]: DEBUG oslo_concurrency.lockutils [req-9ec2123d-aaaf-47e9-94f7-fce437469f5f req-bc00d785-6826-44c4-8442-23dc7290874e service nova] Acquiring lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1768.962422] env[62508]: DEBUG oslo_concurrency.lockutils [req-9ec2123d-aaaf-47e9-94f7-fce437469f5f req-bc00d785-6826-44c4-8442-23dc7290874e service 
nova] Lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1768.962575] env[62508]: DEBUG oslo_concurrency.lockutils [req-9ec2123d-aaaf-47e9-94f7-fce437469f5f req-bc00d785-6826-44c4-8442-23dc7290874e service nova] Lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1768.962746] env[62508]: DEBUG nova.compute.manager [req-9ec2123d-aaaf-47e9-94f7-fce437469f5f req-bc00d785-6826-44c4-8442-23dc7290874e service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] No waiting events found dispatching network-vif-plugged-a51ee93a-fba9-4802-9791-4c16f273346e {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1768.962913] env[62508]: WARNING nova.compute.manager [req-9ec2123d-aaaf-47e9-94f7-fce437469f5f req-bc00d785-6826-44c4-8442-23dc7290874e service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Received unexpected event network-vif-plugged-a51ee93a-fba9-4802-9791-4c16f273346e for instance with vm_state shelved_offloaded and task_state spawning. [ 1769.043925] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1769.043925] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquired lock "refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1769.043925] env[62508]: DEBUG nova.network.neutron [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1769.175249] env[62508]: DEBUG oslo_concurrency.lockutils [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.788s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.177805] env[62508]: DEBUG oslo_concurrency.lockutils [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.245s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.178213] env[62508]: DEBUG nova.objects.instance [None req-086c45de-0b4b-4a06-9fce-10fe004460de 
tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Lazy-loading 'resources' on Instance uuid 806102ec-7622-4770-91c9-8c5723893dec {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1769.197127] env[62508]: INFO nova.scheduler.client.report [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Deleted allocations for instance aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a [ 1769.614977] env[62508]: DEBUG oslo_concurrency.lockutils [None req-00e864a6-9e72-4e73-a02c-1c9e8dd54ff8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "b83dd148-8cf6-474b-bb19-e0822732b12a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.615306] env[62508]: DEBUG oslo_concurrency.lockutils [None req-00e864a6-9e72-4e73-a02c-1c9e8dd54ff8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "b83dd148-8cf6-474b-bb19-e0822732b12a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.615460] env[62508]: DEBUG nova.compute.manager [None req-00e864a6-9e72-4e73-a02c-1c9e8dd54ff8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1769.616407] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65fdf2d8-8093-4f73-ae06-8729c3d18fc1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.625504] env[62508]: DEBUG nova.compute.manager [None req-00e864a6-9e72-4e73-a02c-1c9e8dd54ff8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62508) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1769.626103] env[62508]: DEBUG nova.objects.instance [None req-00e864a6-9e72-4e73-a02c-1c9e8dd54ff8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lazy-loading 'flavor' on Instance uuid b83dd148-8cf6-474b-bb19-e0822732b12a {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1769.710099] env[62508]: DEBUG oslo_concurrency.lockutils [None req-aa429714-e947-4888-a4bd-c455c8a673eb tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.642s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.711844] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" acquired by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 10.392s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.712630] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9009c8ae-c206-4b91-9126-c8d63ff63c4f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.738295] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac37c65e-902d-40d4-88ae-0d5902baf655 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.822662] env[62508]: DEBUG nova.network.neutron [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Updating instance_info_cache with network_info: [{"id": "a51ee93a-fba9-4802-9791-4c16f273346e", "address": "fa:16:3e:5a:cd:a0", "network": {"id": "7fdcf35b-d562-4926-a8b1-15143df837c1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-791265259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86a46b5a43dd41e48816a8d86e3685b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa51ee93a-fb", "ovs_interfaceid": "a51ee93a-fba9-4802-9791-4c16f273346e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1769.924633] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0ad38f-a05f-4fa5-b660-1b86a2c8fa48 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.934532] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f88d26-b961-45f8-9d17-ce44c44f7077 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.965759] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed482647-a95c-4eb3-a7c7-c2cecb9aa8e9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.975564] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04fca44a-9208-413a-85de-c9154885acca {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.990360] env[62508]: DEBUG 
nova.compute.provider_tree [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1770.134420] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-00e864a6-9e72-4e73-a02c-1c9e8dd54ff8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1770.134737] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3f9db41-1d56-4f7a-870d-2d4194abc4de {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.146154] env[62508]: DEBUG oslo_vmware.api [None req-00e864a6-9e72-4e73-a02c-1c9e8dd54ff8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1770.146154] env[62508]: value = "task-1776563" [ 1770.146154] env[62508]: _type = "Task" [ 1770.146154] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.163521] env[62508]: DEBUG oslo_vmware.api [None req-00e864a6-9e72-4e73-a02c-1c9e8dd54ff8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776563, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.288792] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.577s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.325752] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Releasing lock "refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1770.356203] env[62508]: DEBUG nova.virt.hardware [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='de56062eaa13832bc78af859c8230e65',container_format='bare',created_at=2024-12-11T22:16:04Z,direct_url=,disk_format='vmdk',id=cf41ffb1-ea55-4c93-bf70-33b17e44f550,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1680203631-shelved',owner='86a46b5a43dd41e48816a8d86e3685b9',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2024-12-11T22:16:26Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1770.357080] env[62508]: DEBUG nova.virt.hardware [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1770.357227] env[62508]: DEBUG nova.virt.hardware [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1770.358049] env[62508]: DEBUG nova.virt.hardware [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1770.358049] env[62508]: DEBUG nova.virt.hardware [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1770.358049] env[62508]: DEBUG nova.virt.hardware [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1770.359044] env[62508]: DEBUG nova.virt.hardware [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1770.359044] env[62508]: DEBUG nova.virt.hardware [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1770.359044] env[62508]: DEBUG nova.virt.hardware [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1770.359234] env[62508]: DEBUG nova.virt.hardware [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1770.359437] env[62508]: DEBUG nova.virt.hardware [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1770.360475] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a048c51-bbe8-422d-bfc3-3ced9a86bdd3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.370305] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a62e02f-a38e-4aa2-b811-7d688a6d16bf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.385852] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:cd:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0df968ae-c1ef-4009-a0f4-6f2e799c2fda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a51ee93a-fba9-4802-9791-4c16f273346e', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1770.394261] env[62508]: DEBUG oslo.service.loopingcall [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1770.394741] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1770.395050] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1cfc252a-5034-4bb7-9e32-fad0387e7b0c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.414606] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1770.414606] env[62508]: value = "task-1776565" [ 1770.414606] env[62508]: _type = "Task" [ 1770.414606] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.425333] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776565, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.493842] env[62508]: DEBUG nova.scheduler.client.report [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1770.660304] env[62508]: DEBUG oslo_vmware.api [None req-00e864a6-9e72-4e73-a02c-1c9e8dd54ff8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776563, 'name': PowerOffVM_Task, 'duration_secs': 0.338208} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.660543] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-00e864a6-9e72-4e73-a02c-1c9e8dd54ff8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1770.660717] env[62508]: DEBUG nova.compute.manager [None req-00e864a6-9e72-4e73-a02c-1c9e8dd54ff8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1770.661587] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1338dd5-aebf-4593-b907-54f91c59e032 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.926908] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776565, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.985733] env[62508]: DEBUG nova.compute.manager [req-13879a45-147e-43ea-815b-bfbcc862e750 req-f7062422-5df0-4126-86f7-feb45bc249cc service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Received event network-changed-a51ee93a-fba9-4802-9791-4c16f273346e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1770.985983] env[62508]: DEBUG nova.compute.manager [req-13879a45-147e-43ea-815b-bfbcc862e750 req-f7062422-5df0-4126-86f7-feb45bc249cc service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Refreshing instance network info cache due to event network-changed-a51ee93a-fba9-4802-9791-4c16f273346e. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1770.986297] env[62508]: DEBUG oslo_concurrency.lockutils [req-13879a45-147e-43ea-815b-bfbcc862e750 req-f7062422-5df0-4126-86f7-feb45bc249cc service nova] Acquiring lock "refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1770.986408] env[62508]: DEBUG oslo_concurrency.lockutils [req-13879a45-147e-43ea-815b-bfbcc862e750 req-f7062422-5df0-4126-86f7-feb45bc249cc service nova] Acquired lock "refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1770.986525] env[62508]: DEBUG nova.network.neutron [req-13879a45-147e-43ea-815b-bfbcc862e750 req-f7062422-5df0-4126-86f7-feb45bc249cc service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Refreshing network info cache for port a51ee93a-fba9-4802-9791-4c16f273346e {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1770.998262] env[62508]: DEBUG oslo_concurrency.lockutils [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.820s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.000833] env[62508]: DEBUG oslo_concurrency.lockutils [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.819s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.002545] env[62508]: INFO nova.compute.claims [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1771.025104] env[62508]: INFO nova.scheduler.client.report [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Deleted allocations for instance 806102ec-7622-4770-91c9-8c5723893dec [ 1771.175250] env[62508]: DEBUG oslo_concurrency.lockutils [None req-00e864a6-9e72-4e73-a02c-1c9e8dd54ff8 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "b83dd148-8cf6-474b-bb19-e0822732b12a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.560s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.425886] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776565, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.531338] env[62508]: DEBUG oslo_concurrency.lockutils [None req-086c45de-0b4b-4a06-9fce-10fe004460de tempest-ServersTestFqdnHostnames-283149078 tempest-ServersTestFqdnHostnames-283149078-project-member] Lock "806102ec-7622-4770-91c9-8c5723893dec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.268s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.532377] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "806102ec-7622-4770-91c9-8c5723893dec" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 12.212s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.532676] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-29d1349e-7e7a-4343-818a-d3bb6f6e81ce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.543224] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd950d66-f039-4814-b667-3b95c30d6ea1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.777994] env[62508]: DEBUG nova.network.neutron [req-13879a45-147e-43ea-815b-bfbcc862e750 req-f7062422-5df0-4126-86f7-feb45bc249cc service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Updated VIF entry in instance network info cache for port a51ee93a-fba9-4802-9791-4c16f273346e. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1771.778512] env[62508]: DEBUG nova.network.neutron [req-13879a45-147e-43ea-815b-bfbcc862e750 req-f7062422-5df0-4126-86f7-feb45bc249cc service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Updating instance_info_cache with network_info: [{"id": "a51ee93a-fba9-4802-9791-4c16f273346e", "address": "fa:16:3e:5a:cd:a0", "network": {"id": "7fdcf35b-d562-4926-a8b1-15143df837c1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-791265259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86a46b5a43dd41e48816a8d86e3685b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa51ee93a-fb", "ovs_interfaceid": "a51ee93a-fba9-4802-9791-4c16f273346e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1771.928629] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776565, 'name': CreateVM_Task, 'duration_secs': 1.090632} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.928820] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1771.929555] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cf41ffb1-ea55-4c93-bf70-33b17e44f550" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1771.929726] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cf41ffb1-ea55-4c93-bf70-33b17e44f550" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1771.930134] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cf41ffb1-ea55-4c93-bf70-33b17e44f550" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1771.930406] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3a82a5d-c866-45f5-b3fc-fdb1049b6848 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.935871] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1771.935871] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d7a323-3c89-70b0-3461-0292470f23a1" [ 1771.935871] env[62508]: _type = "Task" [ 1771.935871] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.949840] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d7a323-3c89-70b0-3461-0292470f23a1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.087170] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "806102ec-7622-4770-91c9-8c5723893dec" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.554s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.087649] env[62508]: DEBUG nova.objects.instance [None req-4d83f411-e7f8-418c-98c4-14af969c981e tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lazy-loading 'flavor' on Instance uuid b83dd148-8cf6-474b-bb19-e0822732b12a {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1772.195174] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f695e1b-e7e6-48a1-9411-5221f497cdba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.204038] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaeee2cd-1c14-443c-9034-85541ad2690c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.236807] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba8beca-9415-402e-9150-001e38ad7b38 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.245201] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10190e43-1dab-458c-9696-b47c4292b80d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.261105] env[62508]: DEBUG nova.compute.provider_tree [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1772.281326] env[62508]: DEBUG oslo_concurrency.lockutils [req-13879a45-147e-43ea-815b-bfbcc862e750 req-f7062422-5df0-4126-86f7-feb45bc249cc service nova] Releasing lock "refresh_cache-a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1772.447527] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cf41ffb1-ea55-4c93-bf70-33b17e44f550" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1772.447850] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a 
tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Processing image cf41ffb1-ea55-4c93-bf70-33b17e44f550 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1772.448026] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cf41ffb1-ea55-4c93-bf70-33b17e44f550/cf41ffb1-ea55-4c93-bf70-33b17e44f550.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1772.448186] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cf41ffb1-ea55-4c93-bf70-33b17e44f550/cf41ffb1-ea55-4c93-bf70-33b17e44f550.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1772.448373] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1772.448633] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6eda2f3-3032-40c7-a5f2-3ae7ad84321e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.458230] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1772.458420] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1772.459192] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b728187-9db2-4b45-8cc5-babf5f0640b3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.464934] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1772.464934] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5297735f-e928-5aee-e194-f975db51a19a" [ 1772.464934] env[62508]: _type = "Task" [ 1772.464934] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.479461] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Preparing fetch location {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1772.479722] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Fetch image to [datastore1] OSTACK_IMG_70bf8bf6-9a1d-46b6-88de-c81c15811e7f/OSTACK_IMG_70bf8bf6-9a1d-46b6-88de-c81c15811e7f.vmdk {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1772.479907] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Downloading stream optimized image cf41ffb1-ea55-4c93-bf70-33b17e44f550 to [datastore1] OSTACK_IMG_70bf8bf6-9a1d-46b6-88de-c81c15811e7f/OSTACK_IMG_70bf8bf6-9a1d-46b6-88de-c81c15811e7f.vmdk on the data store datastore1 as vApp {{(pid=62508) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1772.480092] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Downloading image file data cf41ffb1-ea55-4c93-bf70-33b17e44f550 to the ESX as VM named 'OSTACK_IMG_70bf8bf6-9a1d-46b6-88de-c81c15811e7f' {{(pid=62508) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1772.563680] env[62508]: DEBUG oslo_vmware.rw_handles [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1772.563680] env[62508]: value = "resgroup-9" [ 1772.563680] env[62508]: _type = "ResourcePool" [ 1772.563680] env[62508]: }. {{(pid=62508) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1772.563990] env[62508]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-6aee591e-685d-4f2e-aba1-72af23c6559c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.586951] env[62508]: DEBUG oslo_vmware.rw_handles [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lease: (returnval){ [ 1772.586951] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52519c84-8df1-a728-ce1f-5b1ec80db7a5" [ 1772.586951] env[62508]: _type = "HttpNfcLease" [ 1772.586951] env[62508]: } obtained for vApp import into resource pool (val){ [ 1772.586951] env[62508]: value = "resgroup-9" [ 1772.586951] env[62508]: _type = "ResourcePool" [ 1772.586951] env[62508]: }. 
{{(pid=62508) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1772.587287] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the lease: (returnval){ [ 1772.587287] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52519c84-8df1-a728-ce1f-5b1ec80db7a5" [ 1772.587287] env[62508]: _type = "HttpNfcLease" [ 1772.587287] env[62508]: } to be ready. {{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1772.593199] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4d83f411-e7f8-418c-98c4-14af969c981e tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "refresh_cache-b83dd148-8cf6-474b-bb19-e0822732b12a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1772.593273] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4d83f411-e7f8-418c-98c4-14af969c981e tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquired lock "refresh_cache-b83dd148-8cf6-474b-bb19-e0822732b12a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1772.593448] env[62508]: DEBUG nova.network.neutron [None req-4d83f411-e7f8-418c-98c4-14af969c981e tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1772.593654] env[62508]: DEBUG nova.objects.instance [None req-4d83f411-e7f8-418c-98c4-14af969c981e tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lazy-loading 'info_cache' on Instance uuid b83dd148-8cf6-474b-bb19-e0822732b12a {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1772.596527] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1772.596527] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52519c84-8df1-a728-ce1f-5b1ec80db7a5" [ 1772.596527] env[62508]: _type = "HttpNfcLease" [ 1772.596527] env[62508]: } is initializing. 
{{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1772.795059] env[62508]: DEBUG nova.scheduler.client.report [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 152 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1772.795500] env[62508]: DEBUG nova.compute.provider_tree [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 152 to 153 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1772.796344] env[62508]: DEBUG nova.compute.provider_tree [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1773.100091] env[62508]: DEBUG nova.objects.base [None req-4d83f411-e7f8-418c-98c4-14af969c981e tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1773.104899] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1773.104899] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52519c84-8df1-a728-ce1f-5b1ec80db7a5" [ 1773.104899] env[62508]: _type = "HttpNfcLease" [ 1773.104899] env[62508]: } is initializing. 
{{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1773.306058] env[62508]: DEBUG oslo_concurrency.lockutils [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.305s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.306633] env[62508]: DEBUG nova.compute.manager [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1773.314893] env[62508]: DEBUG oslo_concurrency.lockutils [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.218s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.317171] env[62508]: INFO nova.compute.claims [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1773.599596] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1773.599596] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52519c84-8df1-a728-ce1f-5b1ec80db7a5" [ 1773.599596] env[62508]: _type = "HttpNfcLease" [ 1773.599596] env[62508]: } is ready. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1773.601143] env[62508]: DEBUG oslo_vmware.rw_handles [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1773.601143] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52519c84-8df1-a728-ce1f-5b1ec80db7a5" [ 1773.601143] env[62508]: _type = "HttpNfcLease" [ 1773.601143] env[62508]: }. 
{{(pid=62508) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1773.601143] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e2e696-ccdf-4a0b-8015-ead0ea5e99d6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.603769] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "a10a4217-ae46-4f00-9ba1-cdf74f44ec7b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.606180] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "a10a4217-ae46-4f00-9ba1-cdf74f44ec7b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.606180] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "a10a4217-ae46-4f00-9ba1-cdf74f44ec7b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.606180] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "a10a4217-ae46-4f00-9ba1-cdf74f44ec7b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.606180] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "a10a4217-ae46-4f00-9ba1-cdf74f44ec7b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.610071] env[62508]: INFO nova.compute.manager [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Terminating instance [ 1773.614745] env[62508]: DEBUG oslo_vmware.rw_handles [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524632a4-87bc-3fb7-b766-d4c3e591fa8d/disk-0.vmdk from lease info. 
{{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1773.614930] env[62508]: DEBUG oslo_vmware.rw_handles [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524632a4-87bc-3fb7-b766-d4c3e591fa8d/disk-0.vmdk. {{(pid=62508) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1773.619053] env[62508]: DEBUG nova.compute.manager [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1773.619053] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1773.620788] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbad26d9-d585-4b48-aa49-8d8944a78109 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.689512] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1773.689787] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-543925c4-4eed-476b-afb4-95fbc1b8d80f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.698730] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b7ddb074-d67a-4e96-a036-bc6315c5f6c0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.700862] env[62508]: DEBUG oslo_vmware.api [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1773.700862] env[62508]: value = "task-1776568" [ 1773.700862] env[62508]: _type = "Task" [ 1773.700862] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.714373] env[62508]: DEBUG oslo_vmware.api [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776568, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.822628] env[62508]: DEBUG nova.compute.utils [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1773.825996] env[62508]: DEBUG nova.compute.manager [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1773.826191] env[62508]: DEBUG nova.network.neutron [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1773.880802] env[62508]: DEBUG nova.policy [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81a4d4d78c7e4f3681eb50940c636e3f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91a10752e7a94a038d1bebf4a2bf4986', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1773.886915] env[62508]: DEBUG nova.network.neutron [None req-4d83f411-e7f8-418c-98c4-14af969c981e tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Updating instance_info_cache with network_info: [{"id": "f79ec10a-7a06-4ee2-8de0-4db1e03d23d1", "address": "fa:16:3e:6b:09:6d", "network": {"id": "9442efbf-54d6-4a21-81ab-3e50e7f19b4c", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2050937254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdc04dc308e44668828caa29bfdbe122", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf79ec10a-7a", "ovs_interfaceid": "f79ec10a-7a06-4ee2-8de0-4db1e03d23d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1774.223378] env[62508]: DEBUG 
oslo_vmware.api [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776568, 'name': PowerOffVM_Task, 'duration_secs': 0.218752} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1774.225191] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1774.225429] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1774.225727] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8412b251-604f-4330-be2f-b9f11ca095f9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.327163] env[62508]: DEBUG nova.compute.manager [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1774.390123] env[62508]: DEBUG nova.network.neutron [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Successfully created port: 792efee6-358d-45ae-b4ff-79e8fea4ff64 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1774.396336] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4d83f411-e7f8-418c-98c4-14af969c981e tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Releasing lock "refresh_cache-b83dd148-8cf6-474b-bb19-e0822732b12a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1774.421384] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1774.421784] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1774.422017] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 
tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Deleting the datastore file [datastore1] a10a4217-ae46-4f00-9ba1-cdf74f44ec7b {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1774.422624] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46141dd7-5357-41cf-8c0e-d968c0a71371 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.432970] env[62508]: DEBUG oslo_vmware.api [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1774.432970] env[62508]: value = "task-1776570" [ 1774.432970] env[62508]: _type = "Task" [ 1774.432970] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.445332] env[62508]: DEBUG oslo_vmware.api [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776570, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.600574] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b11c06-b201-4a62-8313-717d90f4b91a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.615133] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d11d36e-b36f-479c-a00c-668129bca50e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.661546] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b2fd23-898b-4063-9d12-4e06506018a1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.677779] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91477d87-bafa-4309-9509-1b383ea928ef {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.698689] env[62508]: DEBUG nova.compute.provider_tree [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1774.907515] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d83f411-e7f8-418c-98c4-14af969c981e tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1774.907885] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac1ce180-8eb2-4449-8126-939b5f970331 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.918022] env[62508]: DEBUG oslo_vmware.api [None 
req-4d83f411-e7f8-418c-98c4-14af969c981e tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1774.918022] env[62508]: value = "task-1776572" [ 1774.918022] env[62508]: _type = "Task" [ 1774.918022] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.929862] env[62508]: DEBUG oslo_vmware.api [None req-4d83f411-e7f8-418c-98c4-14af969c981e tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776572, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.947050] env[62508]: DEBUG oslo_vmware.api [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776570, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.328164} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1774.949573] env[62508]: DEBUG oslo_vmware.rw_handles [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Completed reading data from the image iterator. {{(pid=62508) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1774.949899] env[62508]: DEBUG oslo_vmware.rw_handles [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524632a4-87bc-3fb7-b766-d4c3e591fa8d/disk-0.vmdk. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1774.950337] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1774.950610] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1774.950885] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1774.951190] env[62508]: INFO nova.compute.manager [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Took 1.33 seconds to destroy the instance on the hypervisor. 
[ 1774.951713] env[62508]: DEBUG oslo.service.loopingcall [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1774.952727] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e761457-4558-40a5-a951-dee7c37ae027 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.956148] env[62508]: DEBUG nova.compute.manager [-] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1774.956352] env[62508]: DEBUG nova.network.neutron [-] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1774.963601] env[62508]: DEBUG oslo_vmware.rw_handles [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524632a4-87bc-3fb7-b766-d4c3e591fa8d/disk-0.vmdk is in state: ready. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1774.963834] env[62508]: DEBUG oslo_vmware.rw_handles [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524632a4-87bc-3fb7-b766-d4c3e591fa8d/disk-0.vmdk. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1774.964209] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-6c0485c3-1843-4081-a54f-16a8f0a5bc29 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.204954] env[62508]: DEBUG nova.scheduler.client.report [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1775.216648] env[62508]: DEBUG oslo_vmware.rw_handles [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524632a4-87bc-3fb7-b766-d4c3e591fa8d/disk-0.vmdk. 
{{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1775.216844] env[62508]: INFO nova.virt.vmwareapi.images [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Downloaded image file data cf41ffb1-ea55-4c93-bf70-33b17e44f550 [ 1775.219502] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf377d8-2bb6-4969-bef9-62cb18b11cd4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.246737] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e734042f-26bb-4b87-bbcc-1ac380230a91 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.275867] env[62508]: INFO nova.virt.vmwareapi.images [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] The imported VM was unregistered [ 1775.279908] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Caching image {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1775.280214] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Creating directory with path [datastore1] devstack-image-cache_base/cf41ffb1-ea55-4c93-bf70-33b17e44f550 {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1775.280537] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b5a9dc4f-0c97-46ae-811f-a3d6b31ba603 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.297564] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Created directory with path [datastore1] devstack-image-cache_base/cf41ffb1-ea55-4c93-bf70-33b17e44f550 {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1775.297834] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_70bf8bf6-9a1d-46b6-88de-c81c15811e7f/OSTACK_IMG_70bf8bf6-9a1d-46b6-88de-c81c15811e7f.vmdk to [datastore1] devstack-image-cache_base/cf41ffb1-ea55-4c93-bf70-33b17e44f550/cf41ffb1-ea55-4c93-bf70-33b17e44f550.vmdk. 
{{(pid=62508) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1775.298134] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-064b935d-3e06-46b8-9831-9e277410435a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.309613] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1775.309613] env[62508]: value = "task-1776574" [ 1775.309613] env[62508]: _type = "Task" [ 1775.309613] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.324535] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776574, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.342225] env[62508]: DEBUG nova.compute.manager [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1775.376040] env[62508]: DEBUG nova.compute.manager [req-d90c671f-aca5-4621-bebb-372d733387f0 req-e60134c0-31b4-4757-b948-b6f34094b5f5 service nova] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Received event network-vif-deleted-f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1775.376040] env[62508]: INFO nova.compute.manager [req-d90c671f-aca5-4621-bebb-372d733387f0 req-e60134c0-31b4-4757-b948-b6f34094b5f5 service nova] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Neutron deleted interface f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63; detaching it from the instance and deleting it from the info cache [ 1775.376040] env[62508]: DEBUG nova.network.neutron [req-d90c671f-aca5-4621-bebb-372d733387f0 req-e60134c0-31b4-4757-b948-b6f34094b5f5 service nova] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1775.391254] env[62508]: DEBUG nova.virt.hardware [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1775.391626] env[62508]: DEBUG nova.virt.hardware [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1775.391813] env[62508]: DEBUG nova.virt.hardware [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1775.392254] env[62508]: DEBUG nova.virt.hardware [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1775.392549] env[62508]: DEBUG nova.virt.hardware [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1775.392695] env[62508]: DEBUG nova.virt.hardware [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1775.392941] env[62508]: DEBUG nova.virt.hardware [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1775.393158] env[62508]: DEBUG nova.virt.hardware [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1775.393349] env[62508]: DEBUG nova.virt.hardware [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1775.393553] env[62508]: DEBUG nova.virt.hardware [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] 
{{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1775.393767] env[62508]: DEBUG nova.virt.hardware [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1775.395157] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e79b6b7-aee5-41c8-91c3-287a8d98918b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.406506] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-133b85c6-eca7-4e18-b8d3-4c2a9b3718c6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.437525] env[62508]: DEBUG oslo_vmware.api [None req-4d83f411-e7f8-418c-98c4-14af969c981e tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776572, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.713039] env[62508]: DEBUG oslo_concurrency.lockutils [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.400s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.713039] env[62508]: DEBUG nova.compute.manager [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1775.718610] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.862s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.718610] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.718610] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1775.718610] env[62508]: DEBUG oslo_concurrency.lockutils [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.720182] env[62508]: INFO nova.compute.claims [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1775.727019] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8f933f-42f4-4a72-861c-43cd7a6fbc3d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.735031] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00356140-5451-47d7-be33-887164af51db {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.751429] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26072cc7-a9dd-4682-806a-29509ba50bc3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.759274] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2a033d8-e1ac-46a1-9a62-ba1a8d5f41a0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.793703] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180072MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1775.793917] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.820748] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776574, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.833752] env[62508]: DEBUG nova.network.neutron [-] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1775.882481] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8889c23c-b0ca-4460-95e1-5e546c6ac103 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.893203] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71ccec1-5e06-48ba-bec1-1e53a80f418c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.926239] env[62508]: DEBUG nova.compute.manager [req-d90c671f-aca5-4621-bebb-372d733387f0 req-e60134c0-31b4-4757-b948-b6f34094b5f5 service nova] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Detach interface failed, port_id=f8a2c0d6-efe1-43a3-a9cf-a1a188e34f63, reason: Instance a10a4217-ae46-4f00-9ba1-cdf74f44ec7b could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1775.940947] env[62508]: DEBUG oslo_vmware.api [None req-4d83f411-e7f8-418c-98c4-14af969c981e tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776572, 'name': PowerOnVM_Task, 'duration_secs': 0.920354} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.941299] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d83f411-e7f8-418c-98c4-14af969c981e tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1775.941493] env[62508]: DEBUG nova.compute.manager [None req-4d83f411-e7f8-418c-98c4-14af969c981e tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1775.942393] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d31f0c8-1928-4798-8e0b-fc0bc6d1e377 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.229271] env[62508]: DEBUG nova.compute.utils [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1776.231098] env[62508]: DEBUG nova.compute.manager [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1776.234116] env[62508]: DEBUG nova.network.neutron [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1776.303376] env[62508]: DEBUG nova.policy [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c1793957cc840d58a1b6f1f9b38b96b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b46df14344794f29a8b0c00408d18159', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1776.321915] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776574, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.336639] env[62508]: INFO nova.compute.manager [-] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Took 1.38 seconds to deallocate network for instance. 
[ 1776.531814] env[62508]: DEBUG nova.compute.manager [req-1bfbcfb8-f28c-4f0d-824a-37ccb434820c req-44d3cb0f-26d4-4b61-bd1f-2a55dc8af484 service nova] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Received event network-vif-plugged-792efee6-358d-45ae-b4ff-79e8fea4ff64 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1776.532121] env[62508]: DEBUG oslo_concurrency.lockutils [req-1bfbcfb8-f28c-4f0d-824a-37ccb434820c req-44d3cb0f-26d4-4b61-bd1f-2a55dc8af484 service nova] Acquiring lock "e7f521db-2dab-4c2c-bf2b-aa6e217f29bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1776.532319] env[62508]: DEBUG oslo_concurrency.lockutils [req-1bfbcfb8-f28c-4f0d-824a-37ccb434820c req-44d3cb0f-26d4-4b61-bd1f-2a55dc8af484 service nova] Lock "e7f521db-2dab-4c2c-bf2b-aa6e217f29bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1776.532484] env[62508]: DEBUG oslo_concurrency.lockutils [req-1bfbcfb8-f28c-4f0d-824a-37ccb434820c req-44d3cb0f-26d4-4b61-bd1f-2a55dc8af484 service nova] Lock "e7f521db-2dab-4c2c-bf2b-aa6e217f29bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1776.532627] env[62508]: DEBUG nova.compute.manager [req-1bfbcfb8-f28c-4f0d-824a-37ccb434820c req-44d3cb0f-26d4-4b61-bd1f-2a55dc8af484 service nova] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] No waiting events found dispatching network-vif-plugged-792efee6-358d-45ae-b4ff-79e8fea4ff64 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1776.532793] env[62508]: WARNING nova.compute.manager [req-1bfbcfb8-f28c-4f0d-824a-37ccb434820c req-44d3cb0f-26d4-4b61-bd1f-2a55dc8af484 service nova] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Received unexpected event network-vif-plugged-792efee6-358d-45ae-b4ff-79e8fea4ff64 for instance with vm_state building and task_state spawning. [ 1776.647240] env[62508]: DEBUG nova.network.neutron [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Successfully updated port: 792efee6-358d-45ae-b4ff-79e8fea4ff64 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1776.738000] env[62508]: DEBUG nova.compute.manager [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1776.823582] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776574, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.845850] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1776.987329] env[62508]: DEBUG nova.network.neutron [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Successfully created port: ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1777.019288] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8909cf-aba0-42d0-80c0-316cdfccb69c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.028459] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80135201-8a7e-4a65-b4b1-4245d43e0c7b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.076949] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d22eac3-cc92-435c-8d96-021db2252a1e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.088993] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41283789-cc09-47cc-a0b8-b33a8d7ca0dc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.111894] env[62508]: DEBUG nova.compute.provider_tree [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1777.154249] env[62508]: DEBUG oslo_concurrency.lockutils [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Acquiring lock "refresh_cache-e7f521db-2dab-4c2c-bf2b-aa6e217f29bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1777.154249] env[62508]: DEBUG oslo_concurrency.lockutils [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Acquired lock "refresh_cache-e7f521db-2dab-4c2c-bf2b-aa6e217f29bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1777.154401] env[62508]: DEBUG nova.network.neutron [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Building network info cache for instance {{(pid=62508) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1777.324486] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776574, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.614737] env[62508]: DEBUG nova.scheduler.client.report [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1777.696145] env[62508]: DEBUG nova.network.neutron [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1777.749835] env[62508]: DEBUG nova.compute.manager [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1777.790603] env[62508]: DEBUG nova.virt.hardware [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1777.793682] env[62508]: DEBUG nova.virt.hardware [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1777.793682] env[62508]: DEBUG nova.virt.hardware [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1777.793682] env[62508]: DEBUG nova.virt.hardware [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1777.793682] env[62508]: DEBUG nova.virt.hardware [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1777.793922] env[62508]: DEBUG nova.virt.hardware [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1777.797279] env[62508]: DEBUG nova.virt.hardware [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1777.797279] env[62508]: DEBUG nova.virt.hardware [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1777.797279] 
env[62508]: DEBUG nova.virt.hardware [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1777.797279] env[62508]: DEBUG nova.virt.hardware [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1777.797279] env[62508]: DEBUG nova.virt.hardware [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1777.797279] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78257155-e25f-47c3-80c2-2e6787a977ef {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.813662] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac7643c8-3492-4b09-989d-2b61ed9ef7a5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.841278] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776574, 'name': MoveVirtualDisk_Task} progress is 85%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.955466] env[62508]: DEBUG nova.network.neutron [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Updating instance_info_cache with network_info: [{"id": "792efee6-358d-45ae-b4ff-79e8fea4ff64", "address": "fa:16:3e:51:1a:93", "network": {"id": "65a331e8-0460-4acd-b98d-a2e2e25a0e72", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1387918119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a10752e7a94a038d1bebf4a2bf4986", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c06e3c2-8edb-4cf0-be6b-45dfe059c00b", "external-id": "nsx-vlan-transportzone-264", "segmentation_id": 264, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap792efee6-35", "ovs_interfaceid": "792efee6-358d-45ae-b4ff-79e8fea4ff64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1778.119975] env[62508]: DEBUG oslo_concurrency.lockutils [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.402s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1778.120623] env[62508]: DEBUG nova.compute.manager [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1778.124975] env[62508]: DEBUG oslo_concurrency.lockutils [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.236s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.125238] env[62508]: DEBUG oslo_concurrency.lockutils [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1778.129905] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.336s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.174335] env[62508]: INFO nova.scheduler.client.report [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Deleted allocations for instance 95a289ac-3178-45ea-80d2-905b9af54f3c [ 1778.334780] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776574, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.460491] env[62508]: DEBUG oslo_concurrency.lockutils [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Releasing lock "refresh_cache-e7f521db-2dab-4c2c-bf2b-aa6e217f29bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1778.460849] env[62508]: DEBUG nova.compute.manager [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Instance network_info: |[{"id": "792efee6-358d-45ae-b4ff-79e8fea4ff64", "address": "fa:16:3e:51:1a:93", "network": {"id": "65a331e8-0460-4acd-b98d-a2e2e25a0e72", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1387918119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a10752e7a94a038d1bebf4a2bf4986", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c06e3c2-8edb-4cf0-be6b-45dfe059c00b", "external-id": "nsx-vlan-transportzone-264", "segmentation_id": 264, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap792efee6-35", "ovs_interfaceid": "792efee6-358d-45ae-b4ff-79e8fea4ff64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1778.461305] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:1a:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2c06e3c2-8edb-4cf0-be6b-45dfe059c00b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '792efee6-358d-45ae-b4ff-79e8fea4ff64', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1778.470217] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Creating folder: Project (91a10752e7a94a038d1bebf4a2bf4986). Parent ref: group-v368536. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1778.470850] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a98535e-2c67-4440-bdb5-012577fb245e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.482833] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Created folder: Project (91a10752e7a94a038d1bebf4a2bf4986) in parent group-v368536. [ 1778.483052] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Creating folder: Instances. Parent ref: group-v368807. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1778.483313] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2dbc9e84-fcd6-45ab-941c-de4157aa249e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.493855] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Created folder: Instances in parent group-v368807. [ 1778.494126] env[62508]: DEBUG oslo.service.loopingcall [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1778.494328] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1778.494543] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b2597ea-f6d5-44c0-ada5-4b38b56a30ac {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.519984] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1778.519984] env[62508]: value = "task-1776578" [ 1778.519984] env[62508]: _type = "Task" [ 1778.519984] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.532182] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776578, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.597038] env[62508]: DEBUG nova.compute.manager [req-b50c365a-573c-4207-a18e-fa56ae3b8431 req-227fc37d-a6ca-4769-8b11-67b2fc422c6f service nova] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Received event network-changed-792efee6-358d-45ae-b4ff-79e8fea4ff64 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1778.597330] env[62508]: DEBUG nova.compute.manager [req-b50c365a-573c-4207-a18e-fa56ae3b8431 req-227fc37d-a6ca-4769-8b11-67b2fc422c6f service nova] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Refreshing instance network info cache due to event network-changed-792efee6-358d-45ae-b4ff-79e8fea4ff64. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1778.597601] env[62508]: DEBUG oslo_concurrency.lockutils [req-b50c365a-573c-4207-a18e-fa56ae3b8431 req-227fc37d-a6ca-4769-8b11-67b2fc422c6f service nova] Acquiring lock "refresh_cache-e7f521db-2dab-4c2c-bf2b-aa6e217f29bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1778.597854] env[62508]: DEBUG oslo_concurrency.lockutils [req-b50c365a-573c-4207-a18e-fa56ae3b8431 req-227fc37d-a6ca-4769-8b11-67b2fc422c6f service nova] Acquired lock "refresh_cache-e7f521db-2dab-4c2c-bf2b-aa6e217f29bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1778.597978] env[62508]: DEBUG nova.network.neutron [req-b50c365a-573c-4207-a18e-fa56ae3b8431 req-227fc37d-a6ca-4769-8b11-67b2fc422c6f service nova] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Refreshing network info cache for port 792efee6-358d-45ae-b4ff-79e8fea4ff64 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1778.634226] env[62508]: DEBUG nova.compute.utils [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1778.645103] env[62508]: DEBUG nova.compute.manager [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1778.645337] env[62508]: DEBUG nova.network.neutron [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1778.683847] env[62508]: DEBUG oslo_concurrency.lockutils [None req-35cf67e6-c7cd-4a27-8190-d9cb893f2a0c tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "95a289ac-3178-45ea-80d2-905b9af54f3c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.813s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1778.704381] env[62508]: DEBUG nova.policy [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '466fd3a805b24749b134fe7977a5ac86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e90ec7156574be597a12f4fa0e8c1dc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1778.835884] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776574, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.055407} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.836987] env[62508]: INFO nova.virt.vmwareapi.ds_util [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_70bf8bf6-9a1d-46b6-88de-c81c15811e7f/OSTACK_IMG_70bf8bf6-9a1d-46b6-88de-c81c15811e7f.vmdk to [datastore1] devstack-image-cache_base/cf41ffb1-ea55-4c93-bf70-33b17e44f550/cf41ffb1-ea55-4c93-bf70-33b17e44f550.vmdk. 
[ 1778.836987] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Cleaning up location [datastore1] OSTACK_IMG_70bf8bf6-9a1d-46b6-88de-c81c15811e7f {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1778.836987] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_70bf8bf6-9a1d-46b6-88de-c81c15811e7f {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1778.836987] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-287a9da9-e31c-4a52-a4c7-07fe0b4c14d1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.847204] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1778.847204] env[62508]: value = "task-1776579" [ 1778.847204] env[62508]: _type = "Task" [ 1778.847204] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.858108] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776579, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.032476] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776578, 'name': CreateVM_Task, 'duration_secs': 0.497756} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.032476] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1779.033155] env[62508]: DEBUG oslo_concurrency.lockutils [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1779.036066] env[62508]: DEBUG oslo_concurrency.lockutils [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1779.036066] env[62508]: DEBUG oslo_concurrency.lockutils [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1779.036066] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-faddfaef-cde3-4381-85cc-23fc54a072d5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.039900] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Waiting for the task: (returnval){ [ 1779.039900] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521a677a-07c7-560d-d876-c158669cb54b" [ 1779.039900] env[62508]: _type = "Task" [ 1779.039900] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.054516] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521a677a-07c7-560d-d876-c158669cb54b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.130018] env[62508]: DEBUG nova.network.neutron [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Successfully created port: 4285d4c3-fb9d-444b-8988-be3ee4475807 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1779.146395] env[62508]: DEBUG nova.compute.manager [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1779.180086] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance de69dbf0-86f1-4b05-a9db-8b9afaabe49c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1779.180086] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1779.180211] env[62508]: WARNING nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance a10a4217-ae46-4f00-9ba1-cdf74f44ec7b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1779.180393] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 24091abb-f71f-4528-8fc5-b97725cf079e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1779.180393] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 2aeb5a4c-785a-4238-8575-ecd1ff84b97c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1779.180487] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance b83dd148-8cf6-474b-bb19-e0822732b12a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1779.180629] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance a8ce13c4-ea95-4343-8eab-8a0dafbf0e03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1779.182363] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 5da47620-3979-44e8-91c5-154a1fe4ee48 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1779.182966] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1779.182966] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance b74d8374-d5ae-456b-9e9e-ec09459a737b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1779.182966] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance e7f521db-2dab-4c2c-bf2b-aa6e217f29bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1779.182966] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 0a4958d5-b9a9-4854-90ca-f19eb34cb15b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1779.183269] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance a15f3cef-c260-4a54-83af-7cccf81e15a6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1779.250993] env[62508]: DEBUG oslo_concurrency.lockutils [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.251302] env[62508]: DEBUG oslo_concurrency.lockutils [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.251522] env[62508]: DEBUG oslo_concurrency.lockutils [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.251732] env[62508]: DEBUG oslo_concurrency.lockutils [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.251909] env[62508]: DEBUG oslo_concurrency.lockutils [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.254139] env[62508]: INFO nova.compute.manager [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Terminating instance [ 1779.255996] env[62508]: DEBUG nova.compute.manager [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1779.256330] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1779.257285] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e7eb43-8d6b-4ba3-9e6e-2009d543cbad {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.268815] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1779.269166] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-54e0bb3a-872f-4d7d-bd8e-f68cc8267420 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.276590] env[62508]: DEBUG oslo_vmware.api [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1779.276590] env[62508]: value = "task-1776580" [ 1779.276590] env[62508]: _type = "Task" [ 1779.276590] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.286023] env[62508]: DEBUG oslo_vmware.api [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776580, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.357955] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776579, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.044963} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.359470] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1779.361171] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cf41ffb1-ea55-4c93-bf70-33b17e44f550/cf41ffb1-ea55-4c93-bf70-33b17e44f550.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1779.361171] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cf41ffb1-ea55-4c93-bf70-33b17e44f550/cf41ffb1-ea55-4c93-bf70-33b17e44f550.vmdk to [datastore1] a8ce13c4-ea95-4343-8eab-8a0dafbf0e03/a8ce13c4-ea95-4343-8eab-8a0dafbf0e03.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1779.361171] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a3f4b0f-98f5-42ed-a700-69731010c3fb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.368472] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1779.368472] env[62508]: value = "task-1776581" [ 1779.368472] env[62508]: _type = "Task" [ 1779.368472] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.378407] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776581, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.450394] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Acquiring lock "3e79a6d4-8639-478e-8753-71ff0e07496f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.450670] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Lock "3e79a6d4-8639-478e-8753-71ff0e07496f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.551737] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521a677a-07c7-560d-d876-c158669cb54b, 'name': SearchDatastore_Task, 'duration_secs': 0.04972} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.552671] env[62508]: DEBUG nova.network.neutron [req-b50c365a-573c-4207-a18e-fa56ae3b8431 req-227fc37d-a6ca-4769-8b11-67b2fc422c6f service nova] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Updated VIF entry in instance network info cache for port 792efee6-358d-45ae-b4ff-79e8fea4ff64. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1779.552991] env[62508]: DEBUG nova.network.neutron [req-b50c365a-573c-4207-a18e-fa56ae3b8431 req-227fc37d-a6ca-4769-8b11-67b2fc422c6f service nova] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Updating instance_info_cache with network_info: [{"id": "792efee6-358d-45ae-b4ff-79e8fea4ff64", "address": "fa:16:3e:51:1a:93", "network": {"id": "65a331e8-0460-4acd-b98d-a2e2e25a0e72", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1387918119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a10752e7a94a038d1bebf4a2bf4986", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c06e3c2-8edb-4cf0-be6b-45dfe059c00b", "external-id": "nsx-vlan-transportzone-264", "segmentation_id": 264, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap792efee6-35", "ovs_interfaceid": "792efee6-358d-45ae-b4ff-79e8fea4ff64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1779.554339] env[62508]: DEBUG oslo_concurrency.lockutils [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1779.554539] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1779.554773] env[62508]: DEBUG oslo_concurrency.lockutils [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1779.554918] env[62508]: DEBUG oslo_concurrency.lockutils [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1779.555187] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 
tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1779.555631] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd0835e6-033d-4027-be2c-9cc6de904608 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.574927] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1779.575360] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1779.575934] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72164a08-aac0-4c1a-8ded-cfe9f7559234 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.583304] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Waiting for the task: (returnval){ [ 1779.583304] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526a6518-5684-5017-ac29-50addd693e55" [ 1779.583304] env[62508]: _type = "Task" [ 1779.583304] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.595821] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526a6518-5684-5017-ac29-50addd693e55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.685611] env[62508]: INFO nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 3e79a6d4-8639-478e-8753-71ff0e07496f has allocations against this compute host but is not found in the database. 
[ 1779.685872] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1779.686208] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2880MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1779.760771] env[62508]: DEBUG nova.network.neutron [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Successfully updated port: ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1779.772994] env[62508]: DEBUG nova.compute.manager [req-2372346e-430b-4043-bfd2-deaab5b75c63 req-c00a2e30-bb5a-4b54-af4b-50dc3f4cb8fe service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Received event network-vif-plugged-ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1779.772994] env[62508]: DEBUG oslo_concurrency.lockutils [req-2372346e-430b-4043-bfd2-deaab5b75c63 req-c00a2e30-bb5a-4b54-af4b-50dc3f4cb8fe service nova] Acquiring lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.772994] env[62508]: DEBUG oslo_concurrency.lockutils [req-2372346e-430b-4043-bfd2-deaab5b75c63 req-c00a2e30-bb5a-4b54-af4b-50dc3f4cb8fe service nova] Lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.772994] env[62508]: DEBUG oslo_concurrency.lockutils [req-2372346e-430b-4043-bfd2-deaab5b75c63 req-c00a2e30-bb5a-4b54-af4b-50dc3f4cb8fe service nova] Lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.772994] env[62508]: DEBUG nova.compute.manager [req-2372346e-430b-4043-bfd2-deaab5b75c63 req-c00a2e30-bb5a-4b54-af4b-50dc3f4cb8fe service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] No waiting events found dispatching network-vif-plugged-ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1779.772994] env[62508]: WARNING nova.compute.manager [req-2372346e-430b-4043-bfd2-deaab5b75c63 req-c00a2e30-bb5a-4b54-af4b-50dc3f4cb8fe service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Received unexpected event network-vif-plugged-ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4 for instance with vm_state building and task_state spawning. 
[ 1779.796604] env[62508]: DEBUG oslo_vmware.api [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776580, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.883166] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776581, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.941062] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1145b217-5554-4deb-83d0-c7aaef4fa428 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.948922] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38422b10-686c-4517-ad99-60a417ecd1e6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.952984] env[62508]: DEBUG nova.compute.manager [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1779.984195] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9866f87e-51b7-4262-972e-912f9211a5eb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.993668] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b1b3ecd-500d-46ec-9b12-c7edf7d0551f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.009996] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1780.057149] env[62508]: DEBUG oslo_concurrency.lockutils [req-b50c365a-573c-4207-a18e-fa56ae3b8431 req-227fc37d-a6ca-4769-8b11-67b2fc422c6f service nova] Releasing lock "refresh_cache-e7f521db-2dab-4c2c-bf2b-aa6e217f29bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1780.095022] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526a6518-5684-5017-ac29-50addd693e55, 'name': SearchDatastore_Task, 'duration_secs': 0.072426} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.095943] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c89d04cf-fdbe-4cb2-a9cf-26f51c145aeb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.102511] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Waiting for the task: (returnval){ [ 1780.102511] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529e814f-99f6-84a1-c815-11da632caf0c" [ 1780.102511] env[62508]: _type = "Task" [ 1780.102511] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.111807] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529e814f-99f6-84a1-c815-11da632caf0c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.161441] env[62508]: DEBUG nova.compute.manager [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1780.190731] env[62508]: DEBUG nova.virt.hardware [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1780.191053] env[62508]: DEBUG nova.virt.hardware [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1780.191291] env[62508]: DEBUG nova.virt.hardware [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1780.191606] env[62508]: DEBUG nova.virt.hardware [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f 
tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1780.191774] env[62508]: DEBUG nova.virt.hardware [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1780.191930] env[62508]: DEBUG nova.virt.hardware [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1780.192283] env[62508]: DEBUG nova.virt.hardware [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1780.192809] env[62508]: DEBUG nova.virt.hardware [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1780.192809] env[62508]: DEBUG nova.virt.hardware [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1780.192953] env[62508]: DEBUG nova.virt.hardware [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1780.193261] env[62508]: DEBUG nova.virt.hardware [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1780.194335] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b3b7391-738d-47da-b341-8c64121e4222 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.207434] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c42402-95bc-4fb2-8e93-788a3c15721f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.263918] env[62508]: DEBUG oslo_concurrency.lockutils [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1780.264091] 
env[62508]: DEBUG oslo_concurrency.lockutils [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1780.264256] env[62508]: DEBUG nova.network.neutron [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1780.290832] env[62508]: DEBUG oslo_vmware.api [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776580, 'name': PowerOffVM_Task, 'duration_secs': 0.655124} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.291199] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1780.291416] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1780.291724] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bf8610ef-af4d-4244-be3b-98eaf38099fb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.371825] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "544d165c-5054-4c57-a5d9-ac69046c6fbc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.372275] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "544d165c-5054-4c57-a5d9-ac69046c6fbc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.384547] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776581, 'name': CopyVirtualDisk_Task} progress is 40%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.439803] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1780.440147] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1780.440538] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Deleting the datastore file [datastore1] 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1780.440847] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c32f969e-2ba8-42c0-8744-58f275dc3a8b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.450054] env[62508]: DEBUG oslo_vmware.api [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1780.450054] env[62508]: value = "task-1776583" [ 1780.450054] env[62508]: _type = "Task" [ 1780.450054] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.462411] env[62508]: DEBUG oslo_vmware.api [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776583, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.480691] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.513482] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1780.617987] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529e814f-99f6-84a1-c815-11da632caf0c, 'name': SearchDatastore_Task, 'duration_secs': 0.08724} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.618323] env[62508]: DEBUG oslo_concurrency.lockutils [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1780.618722] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] e7f521db-2dab-4c2c-bf2b-aa6e217f29bd/e7f521db-2dab-4c2c-bf2b-aa6e217f29bd.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1780.618895] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-781c74ec-50a1-48a0-8b69-c119830fb343 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.630429] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Waiting for the task: (returnval){ [ 1780.630429] env[62508]: value = "task-1776584" [ 1780.630429] env[62508]: _type = "Task" [ 1780.630429] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.642146] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': task-1776584, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.808356] env[62508]: DEBUG nova.network.neutron [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1780.877788] env[62508]: DEBUG nova.compute.manager [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1780.886195] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776581, 'name': CopyVirtualDisk_Task} progress is 60%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.965594] env[62508]: DEBUG oslo_vmware.api [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776583, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.983114] env[62508]: DEBUG nova.compute.manager [req-f28fc3c0-c275-458a-9ed0-2554c0cd0358 req-3f1cb4be-ac90-492d-bac0-ba140b1bfa0a service nova] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Received event network-vif-plugged-4285d4c3-fb9d-444b-8988-be3ee4475807 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1780.983236] env[62508]: DEBUG oslo_concurrency.lockutils [req-f28fc3c0-c275-458a-9ed0-2554c0cd0358 req-3f1cb4be-ac90-492d-bac0-ba140b1bfa0a service nova] Acquiring lock "a15f3cef-c260-4a54-83af-7cccf81e15a6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.983451] env[62508]: DEBUG oslo_concurrency.lockutils [req-f28fc3c0-c275-458a-9ed0-2554c0cd0358 req-3f1cb4be-ac90-492d-bac0-ba140b1bfa0a service nova] Lock "a15f3cef-c260-4a54-83af-7cccf81e15a6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.983679] env[62508]: DEBUG oslo_concurrency.lockutils [req-f28fc3c0-c275-458a-9ed0-2554c0cd0358 req-3f1cb4be-ac90-492d-bac0-ba140b1bfa0a service nova] Lock "a15f3cef-c260-4a54-83af-7cccf81e15a6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.983910] env[62508]: DEBUG nova.compute.manager [req-f28fc3c0-c275-458a-9ed0-2554c0cd0358 req-3f1cb4be-ac90-492d-bac0-ba140b1bfa0a service nova] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] No waiting events found dispatching network-vif-plugged-4285d4c3-fb9d-444b-8988-be3ee4475807 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1780.984303] env[62508]: WARNING nova.compute.manager [req-f28fc3c0-c275-458a-9ed0-2554c0cd0358 req-3f1cb4be-ac90-492d-bac0-ba140b1bfa0a service nova] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Received unexpected event network-vif-plugged-4285d4c3-fb9d-444b-8988-be3ee4475807 for instance with vm_state building and task_state spawning. 
[ 1781.019607] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1781.019607] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.889s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.019607] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.173s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1781.019607] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.022193] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.542s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1781.023782] env[62508]: INFO nova.compute.claims [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1781.033279] env[62508]: DEBUG nova.network.neutron [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Updating instance_info_cache with network_info: [{"id": "ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4", "address": "fa:16:3e:8b:f5:37", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapea63968d-e3", "ovs_interfaceid": "ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1781.069875] env[62508]: INFO nova.scheduler.client.report [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Deleted allocations for instance a10a4217-ae46-4f00-9ba1-cdf74f44ec7b [ 1781.121526] env[62508]: DEBUG nova.network.neutron [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Successfully updated port: 4285d4c3-fb9d-444b-8988-be3ee4475807 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1781.145509] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': task-1776584, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.385696] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776581, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.410519] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1781.464481] env[62508]: DEBUG oslo_vmware.api [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776583, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.543276] env[62508]: DEBUG oslo_concurrency.lockutils [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1781.543587] env[62508]: DEBUG nova.compute.manager [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Instance network_info: |[{"id": "ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4", "address": "fa:16:3e:8b:f5:37", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea63968d-e3", "ovs_interfaceid": "ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1781.544036] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:f5:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dba18786-598d-4e06-96db-b3dc1717530f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1781.551968] env[62508]: DEBUG oslo.service.loopingcall [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1781.553187] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1781.553474] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03996842-138e-4ea4-849e-66d3160a76f7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.577295] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1781.577295] env[62508]: value = "task-1776586" [ 1781.577295] env[62508]: _type = "Task" [ 1781.577295] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.584043] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c61e428f-7a2c-4750-8e68-e40c95768119 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "a10a4217-ae46-4f00-9ba1-cdf74f44ec7b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.980s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.593729] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776586, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.625450] env[62508]: DEBUG oslo_concurrency.lockutils [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "refresh_cache-a15f3cef-c260-4a54-83af-7cccf81e15a6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1781.625767] env[62508]: DEBUG oslo_concurrency.lockutils [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "refresh_cache-a15f3cef-c260-4a54-83af-7cccf81e15a6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1781.626060] env[62508]: DEBUG nova.network.neutron [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1781.647924] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': task-1776584, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.801235] env[62508]: DEBUG nova.compute.manager [req-1908f7f3-e032-4cdc-beec-ea11aa13d291 req-b09d5bb2-0eb5-4a08-b940-44b8c1517ce8 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Received event network-changed-ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1781.801475] env[62508]: DEBUG nova.compute.manager [req-1908f7f3-e032-4cdc-beec-ea11aa13d291 req-b09d5bb2-0eb5-4a08-b940-44b8c1517ce8 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Refreshing instance network info cache due to event network-changed-ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1781.801830] env[62508]: DEBUG oslo_concurrency.lockutils [req-1908f7f3-e032-4cdc-beec-ea11aa13d291 req-b09d5bb2-0eb5-4a08-b940-44b8c1517ce8 service nova] Acquiring lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1781.804118] env[62508]: DEBUG oslo_concurrency.lockutils [req-1908f7f3-e032-4cdc-beec-ea11aa13d291 req-b09d5bb2-0eb5-4a08-b940-44b8c1517ce8 service nova] Acquired lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1781.804760] env[62508]: DEBUG nova.network.neutron [req-1908f7f3-e032-4cdc-beec-ea11aa13d291 req-b09d5bb2-0eb5-4a08-b940-44b8c1517ce8 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Refreshing network info cache for port ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1781.886890] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776581, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.435584} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.887277] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cf41ffb1-ea55-4c93-bf70-33b17e44f550/cf41ffb1-ea55-4c93-bf70-33b17e44f550.vmdk to [datastore1] a8ce13c4-ea95-4343-8eab-8a0dafbf0e03/a8ce13c4-ea95-4343-8eab-8a0dafbf0e03.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1781.888385] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a85021b-c2bf-41bf-bad8-b789dd482439 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.917440] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] a8ce13c4-ea95-4343-8eab-8a0dafbf0e03/a8ce13c4-ea95-4343-8eab-8a0dafbf0e03.vmdk or device None with type streamOptimized {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1781.917866] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2fcf68fd-80d4-438b-bf29-a13c59202be7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.941935] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1781.941935] env[62508]: value = "task-1776587" [ 1781.941935] env[62508]: _type = "Task" [ 1781.941935] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.956173] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776587, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.966613] env[62508]: DEBUG oslo_vmware.api [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776583, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.404738} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.968521] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1781.968832] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1781.969039] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1781.969232] env[62508]: INFO nova.compute.manager [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Took 2.71 seconds to destroy the instance on the hypervisor. [ 1781.969543] env[62508]: DEBUG oslo.service.loopingcall [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1781.969763] env[62508]: DEBUG nova.compute.manager [-] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1781.969856] env[62508]: DEBUG nova.network.neutron [-] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1782.091764] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776586, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.144714] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': task-1776584, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.168718] env[62508]: DEBUG nova.network.neutron [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1782.282624] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31bd32c2-a0f0-4535-907e-95ab1e7be64a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.293140] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07606c5e-5614-49a8-8713-32c3f69a0a7d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.330354] env[62508]: DEBUG nova.network.neutron [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Updating instance_info_cache with network_info: [{"id": "4285d4c3-fb9d-444b-8988-be3ee4475807", "address": "fa:16:3e:ef:6e:db", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4285d4c3-fb", "ovs_interfaceid": "4285d4c3-fb9d-444b-8988-be3ee4475807", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.332326] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45214781-cf4e-40b7-8611-706433c1019f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.341523] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d832e34-1f8c-4f29-a9f5-bf6ce3f7f555 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.358574] env[62508]: DEBUG nova.compute.provider_tree [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1782.453514] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776587, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.574312] env[62508]: DEBUG nova.network.neutron [req-1908f7f3-e032-4cdc-beec-ea11aa13d291 req-b09d5bb2-0eb5-4a08-b940-44b8c1517ce8 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Updated VIF entry in instance network info cache for port ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1782.574670] env[62508]: DEBUG nova.network.neutron [req-1908f7f3-e032-4cdc-beec-ea11aa13d291 req-b09d5bb2-0eb5-4a08-b940-44b8c1517ce8 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Updating instance_info_cache with network_info: [{"id": "ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4", "address": "fa:16:3e:8b:f5:37", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea63968d-e3", "ovs_interfaceid": "ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.589552] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776586, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.645423] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': task-1776584, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.542827} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.645689] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] e7f521db-2dab-4c2c-bf2b-aa6e217f29bd/e7f521db-2dab-4c2c-bf2b-aa6e217f29bd.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1782.645914] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1782.646175] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-543689e8-239d-4283-960d-f53a4b391fb9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.653360] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Waiting for the task: (returnval){ [ 1782.653360] env[62508]: value = "task-1776588" [ 1782.653360] env[62508]: _type = "Task" [ 1782.653360] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.661990] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': task-1776588, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.684753] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.685066] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.685331] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "7015b188-17ca-45ec-8fe8-f80ef0f9cb0a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.685562] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "7015b188-17ca-45ec-8fe8-f80ef0f9cb0a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.685812] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "7015b188-17ca-45ec-8fe8-f80ef0f9cb0a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.688866] env[62508]: INFO nova.compute.manager [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Terminating instance [ 1782.690317] env[62508]: DEBUG nova.compute.manager [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1782.690515] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1782.691453] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0520097a-401b-4b90-b235-5957d7f7aca9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.701456] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1782.701769] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c7bbfc2b-281c-4871-8e82-e01eaa4dfde0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.712021] env[62508]: DEBUG oslo_vmware.api [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1782.712021] env[62508]: value = "task-1776589" [ 1782.712021] env[62508]: _type = "Task" [ 1782.712021] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.720019] env[62508]: DEBUG oslo_vmware.api [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776589, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.788350] env[62508]: DEBUG nova.network.neutron [-] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.835908] env[62508]: DEBUG oslo_concurrency.lockutils [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "refresh_cache-a15f3cef-c260-4a54-83af-7cccf81e15a6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1782.836359] env[62508]: DEBUG nova.compute.manager [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Instance network_info: |[{"id": "4285d4c3-fb9d-444b-8988-be3ee4475807", "address": "fa:16:3e:ef:6e:db", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4285d4c3-fb", "ovs_interfaceid": "4285d4c3-fb9d-444b-8988-be3ee4475807", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1782.836797] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:6e:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4285d4c3-fb9d-444b-8988-be3ee4475807', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1782.845118] env[62508]: DEBUG oslo.service.loopingcall [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1782.845350] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1782.845582] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-661ca15a-8da2-4014-aa98-cf84e01966d6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.862167] env[62508]: DEBUG nova.scheduler.client.report [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1782.867452] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1782.867452] env[62508]: value = "task-1776591" [ 1782.867452] env[62508]: _type = "Task" [ 1782.867452] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.876643] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776591, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.954276] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776587, 'name': ReconfigVM_Task, 'duration_secs': 0.904817} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.954603] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Reconfigured VM instance instance-0000003c to attach disk [datastore1] a8ce13c4-ea95-4343-8eab-8a0dafbf0e03/a8ce13c4-ea95-4343-8eab-8a0dafbf0e03.vmdk or device None with type streamOptimized {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1782.955741] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'size': 0, 'device_type': 'disk', 'boot_index': 0, 'encryption_options': None, 'device_name': '/dev/sda', 'disk_bus': None, 'guest_format': None, 'encryption_secret_uuid': None, 'encrypted': False, 'encryption_format': None, 'image_id': 'f81c384b-39f5-44b6-928f-ab9f4bc0a9f7'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'device_type': None, 'boot_index': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368796', 'volume_id': '6a9e4102-a8e2-41b9-9290-7b0979ea805b', 'name': 'volume-6a9e4102-a8e2-41b9-9290-7b0979ea805b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'a8ce13c4-ea95-4343-8eab-8a0dafbf0e03', 'attached_at': '', 'detached_at': '', 'volume_id': '6a9e4102-a8e2-41b9-9290-7b0979ea805b', 'serial': '6a9e4102-a8e2-41b9-9290-7b0979ea805b'}, 'disk_bus': None, 'guest_format': None, 'attachment_id': '4649df4e-fc5c-44de-a5a1-5963166523ef', 'mount_device': '/dev/sdb', 'volume_type': None}], 'swap': None} {{(pid=62508) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1782.956023] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Volume attach. 
Driver type: vmdk {{(pid=62508) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1782.956243] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368796', 'volume_id': '6a9e4102-a8e2-41b9-9290-7b0979ea805b', 'name': 'volume-6a9e4102-a8e2-41b9-9290-7b0979ea805b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'a8ce13c4-ea95-4343-8eab-8a0dafbf0e03', 'attached_at': '', 'detached_at': '', 'volume_id': '6a9e4102-a8e2-41b9-9290-7b0979ea805b', 'serial': '6a9e4102-a8e2-41b9-9290-7b0979ea805b'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1782.957100] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b200f2b6-5f41-401c-b772-3e1045adfee0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.974721] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-868c0d64-52b3-40b6-a8f5-9ed5762e0c33 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.004401] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] volume-6a9e4102-a8e2-41b9-9290-7b0979ea805b/volume-6a9e4102-a8e2-41b9-9290-7b0979ea805b.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1783.004729] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ea89026-7bd1-4381-9fb4-1194c039856b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.022936] env[62508]: DEBUG nova.compute.manager [req-a1aca132-2b15-44a5-9eba-4daa08997dc6 req-d1dd87ee-8846-44aa-ad1d-40634069d16b service nova] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Received event network-changed-4285d4c3-fb9d-444b-8988-be3ee4475807 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1783.022936] env[62508]: DEBUG nova.compute.manager [req-a1aca132-2b15-44a5-9eba-4daa08997dc6 req-d1dd87ee-8846-44aa-ad1d-40634069d16b service nova] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Refreshing instance network info cache due to event network-changed-4285d4c3-fb9d-444b-8988-be3ee4475807. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1783.023078] env[62508]: DEBUG oslo_concurrency.lockutils [req-a1aca132-2b15-44a5-9eba-4daa08997dc6 req-d1dd87ee-8846-44aa-ad1d-40634069d16b service nova] Acquiring lock "refresh_cache-a15f3cef-c260-4a54-83af-7cccf81e15a6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1783.023296] env[62508]: DEBUG oslo_concurrency.lockutils [req-a1aca132-2b15-44a5-9eba-4daa08997dc6 req-d1dd87ee-8846-44aa-ad1d-40634069d16b service nova] Acquired lock "refresh_cache-a15f3cef-c260-4a54-83af-7cccf81e15a6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.023480] env[62508]: DEBUG nova.network.neutron [req-a1aca132-2b15-44a5-9eba-4daa08997dc6 req-d1dd87ee-8846-44aa-ad1d-40634069d16b service nova] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Refreshing network info cache for port 4285d4c3-fb9d-444b-8988-be3ee4475807 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1783.031475] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1783.031475] env[62508]: value = "task-1776592" [ 1783.031475] env[62508]: _type = "Task" [ 1783.031475] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.043838] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776592, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.078091] env[62508]: DEBUG oslo_concurrency.lockutils [req-1908f7f3-e032-4cdc-beec-ea11aa13d291 req-b09d5bb2-0eb5-4a08-b940-44b8c1517ce8 service nova] Releasing lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1783.090718] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776586, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.164652] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': task-1776588, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079229} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.164870] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1783.165716] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a2cc54-9ce7-4afb-9d51-e555da548c2f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.190481] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] e7f521db-2dab-4c2c-bf2b-aa6e217f29bd/e7f521db-2dab-4c2c-bf2b-aa6e217f29bd.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1783.190819] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b0f68b6-0002-4329-ae6e-3ae52468a3b2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.215408] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Waiting for the task: (returnval){ [ 1783.215408] env[62508]: value = "task-1776593" [ 1783.215408] env[62508]: _type = "Task" [ 1783.215408] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.224160] env[62508]: DEBUG oslo_vmware.api [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776589, 'name': PowerOffVM_Task, 'duration_secs': 0.356418} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.227308] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1783.227548] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1783.227840] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': task-1776593, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.228077] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a2a22eb-6c80-464c-a958-170e45ce1d0c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.291112] env[62508]: INFO nova.compute.manager [-] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Took 1.32 seconds to deallocate network for instance. [ 1783.372755] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.350s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1783.373196] env[62508]: DEBUG nova.compute.manager [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1783.375928] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.966s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.377468] env[62508]: INFO nova.compute.claims [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1783.386801] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776591, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.542094] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776592, 'name': ReconfigVM_Task, 'duration_secs': 0.370458} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.542403] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Reconfigured VM instance instance-0000003c to attach disk [datastore1] volume-6a9e4102-a8e2-41b9-9290-7b0979ea805b/volume-6a9e4102-a8e2-41b9-9290-7b0979ea805b.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1783.547736] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e30b2352-2dd0-4ee7-b81d-9e90a02d204a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.567242] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1783.567242] env[62508]: value = "task-1776595" [ 1783.567242] env[62508]: _type = "Task" [ 1783.567242] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.578291] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776595, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.590445] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776586, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.729906] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': task-1776593, 'name': ReconfigVM_Task, 'duration_secs': 0.483383} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.730213] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Reconfigured VM instance instance-00000060 to attach disk [datastore1] e7f521db-2dab-4c2c-bf2b-aa6e217f29bd/e7f521db-2dab-4c2c-bf2b-aa6e217f29bd.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1783.730894] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa683f2c-848b-4258-ba86-9d072cdf5911 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.737956] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Waiting for the task: (returnval){ [ 1783.737956] env[62508]: value = "task-1776596" [ 1783.737956] env[62508]: _type = "Task" [ 1783.737956] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.746711] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': task-1776596, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.748477] env[62508]: DEBUG nova.network.neutron [req-a1aca132-2b15-44a5-9eba-4daa08997dc6 req-d1dd87ee-8846-44aa-ad1d-40634069d16b service nova] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Updated VIF entry in instance network info cache for port 4285d4c3-fb9d-444b-8988-be3ee4475807. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1783.748809] env[62508]: DEBUG nova.network.neutron [req-a1aca132-2b15-44a5-9eba-4daa08997dc6 req-d1dd87ee-8846-44aa-ad1d-40634069d16b service nova] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Updating instance_info_cache with network_info: [{"id": "4285d4c3-fb9d-444b-8988-be3ee4475807", "address": "fa:16:3e:ef:6e:db", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4285d4c3-fb", "ovs_interfaceid": "4285d4c3-fb9d-444b-8988-be3ee4475807", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1783.798227] env[62508]: DEBUG oslo_concurrency.lockutils [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.878965] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776591, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.882309] env[62508]: DEBUG nova.compute.utils [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1783.887653] env[62508]: DEBUG nova.compute.manager [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1783.887858] env[62508]: DEBUG nova.network.neutron [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1783.928029] env[62508]: DEBUG nova.policy [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5951192f1d594e18adc60a10afc84075', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd2aaa479b08b4ad29d8ce07da5802e3e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1784.077951] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776595, 'name': ReconfigVM_Task, 'duration_secs': 0.153956} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.078515] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368796', 'volume_id': '6a9e4102-a8e2-41b9-9290-7b0979ea805b', 'name': 'volume-6a9e4102-a8e2-41b9-9290-7b0979ea805b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'a8ce13c4-ea95-4343-8eab-8a0dafbf0e03', 'attached_at': '', 'detached_at': '', 'volume_id': '6a9e4102-a8e2-41b9-9290-7b0979ea805b', 'serial': '6a9e4102-a8e2-41b9-9290-7b0979ea805b'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1784.079211] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8c2cda55-a73f-49ff-afbd-44cae5ee498d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.086765] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1784.086765] env[62508]: value = "task-1776597" [ 1784.086765] env[62508]: _type = "Task" [ 1784.086765] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.093837] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776586, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.100632] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776597, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.253124] env[62508]: DEBUG oslo_concurrency.lockutils [req-a1aca132-2b15-44a5-9eba-4daa08997dc6 req-d1dd87ee-8846-44aa-ad1d-40634069d16b service nova] Releasing lock "refresh_cache-a15f3cef-c260-4a54-83af-7cccf81e15a6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1784.253880] env[62508]: DEBUG nova.compute.manager [req-a1aca132-2b15-44a5-9eba-4daa08997dc6 req-d1dd87ee-8846-44aa-ad1d-40634069d16b service nova] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Received event network-vif-deleted-6fe0e3e8-4640-43e5-992e-718372bd92d1 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1784.253880] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': task-1776596, 'name': Rename_Task, 'duration_secs': 0.40797} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.254079] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1784.254345] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-293cb7a2-f624-451d-83e7-1769a6547fcd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.263834] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Waiting for the task: (returnval){ [ 1784.263834] env[62508]: value = "task-1776598" [ 1784.263834] env[62508]: _type = "Task" [ 1784.263834] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.276712] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': task-1776598, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.277966] env[62508]: DEBUG nova.network.neutron [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Successfully created port: cf175192-2f32-4a16-aa1c-26be6500c839 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1784.379701] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776591, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.391320] env[62508]: DEBUG nova.compute.manager [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1784.601157] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776586, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.608439] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776597, 'name': Rename_Task, 'duration_secs': 0.1924} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.609309] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1784.609309] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d2049156-0def-417b-9071-554108c870ce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.612095] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d16463-0b16-4a60-bae9-5e0480c0fb13 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.621943] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fea6bd4-cc20-4153-88ca-794ad68a601a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.626344] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1784.626344] env[62508]: value = "task-1776599" [ 1784.626344] env[62508]: _type = "Task" [ 1784.626344] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.655768] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22861f8b-3fa3-4bf3-b72c-96d9dceed820 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.661861] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776599, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.667776] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-395d9d60-7484-4645-9b87-0c970680e867 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.684082] env[62508]: DEBUG nova.compute.provider_tree [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1784.777351] env[62508]: DEBUG oslo_vmware.api [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': task-1776598, 'name': PowerOnVM_Task, 'duration_secs': 0.497311} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.777718] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1784.777776] env[62508]: INFO nova.compute.manager [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Took 9.44 seconds to spawn the instance on the hypervisor. [ 1784.777931] env[62508]: DEBUG nova.compute.manager [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1784.779047] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea6e406-6edd-404a-b1f2-746cb5a3ead8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.880297] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776591, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.094209] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776586, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.136949] env[62508]: DEBUG oslo_vmware.api [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776599, 'name': PowerOnVM_Task, 'duration_secs': 0.46845} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.138105] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1785.187360] env[62508]: DEBUG nova.scheduler.client.report [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1785.245616] env[62508]: DEBUG nova.compute.manager [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1785.246529] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8484be-73d1-42bd-b877-43e981d60580 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.300243] env[62508]: INFO nova.compute.manager [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Took 31.14 seconds to build instance. [ 1785.381488] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776591, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.402892] env[62508]: DEBUG nova.compute.manager [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1785.427101] env[62508]: DEBUG nova.virt.hardware [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1785.427454] env[62508]: DEBUG nova.virt.hardware [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1785.428492] env[62508]: DEBUG nova.virt.hardware [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1785.428492] env[62508]: DEBUG nova.virt.hardware [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1785.428492] env[62508]: DEBUG nova.virt.hardware [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1785.428492] env[62508]: DEBUG nova.virt.hardware [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1785.428492] env[62508]: DEBUG nova.virt.hardware [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1785.428706] env[62508]: DEBUG nova.virt.hardware [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1785.428770] env[62508]: DEBUG 
nova.virt.hardware [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1785.428926] env[62508]: DEBUG nova.virt.hardware [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1785.429142] env[62508]: DEBUG nova.virt.hardware [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1785.430348] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a6d947d-5caf-4d63-878e-c7edd9c3bca3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.439494] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3543523-8a48-466d-86f7-994c923ab9df {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.531738] env[62508]: DEBUG oslo_concurrency.lockutils [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1785.531999] env[62508]: DEBUG oslo_concurrency.lockutils [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1785.593704] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776586, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.693492] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.317s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1785.694043] env[62508]: DEBUG nova.compute.manager [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1785.696772] env[62508]: DEBUG oslo_concurrency.lockutils [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.899s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1785.697006] env[62508]: DEBUG nova.objects.instance [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lazy-loading 'resources' on Instance uuid 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1785.766411] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2ba01240-84bf-4cfe-b5a1-c5617ff0322a tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 44.502s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1785.802260] env[62508]: DEBUG oslo_concurrency.lockutils [None req-067ee2c2-1e0a-4f03-969b-42b7f2f6fbb5 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Lock "e7f521db-2dab-4c2c-bf2b-aa6e217f29bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.645s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1785.882442] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776591, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.035716] env[62508]: DEBUG nova.compute.utils [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1786.096148] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776586, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.202025] env[62508]: DEBUG nova.compute.utils [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1786.202025] env[62508]: DEBUG nova.compute.manager [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1786.202025] env[62508]: DEBUG nova.network.neutron [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1786.257820] env[62508]: DEBUG nova.policy [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2561183ef9c54615988c33906fc5f84e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce0dd059301e41abb3758625d38e435e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1786.384617] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776591, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.412691] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a816494-3d43-498c-8381-d52b63aaf726 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.422218] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-713e0f46-fe70-4ad7-bab3-a0031a11d4fd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.456905] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5616eb-5526-4653-8705-53b23e0ff12e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.466110] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244cc68d-5f99-4530-bf8d-085f3e2ace4c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.480598] env[62508]: DEBUG nova.compute.provider_tree [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1786.538965] env[62508]: DEBUG oslo_concurrency.lockutils [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1786.554875] env[62508]: DEBUG nova.network.neutron [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 
544d165c-5054-4c57-a5d9-ac69046c6fbc] Successfully created port: e0bb9906-0d2a-4bdb-bbe4-5a3074c66499 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1786.601190] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776586, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.705044] env[62508]: DEBUG nova.compute.manager [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1786.753116] env[62508]: DEBUG nova.compute.manager [req-4afa3776-37e7-4b81-af39-909b81148fef req-96fea979-fe82-4f3c-bff5-5a79ab6897c4 service nova] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Received event network-changed-792efee6-358d-45ae-b4ff-79e8fea4ff64 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1786.753116] env[62508]: DEBUG nova.compute.manager [req-4afa3776-37e7-4b81-af39-909b81148fef req-96fea979-fe82-4f3c-bff5-5a79ab6897c4 service nova] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Refreshing instance network info cache due to event network-changed-792efee6-358d-45ae-b4ff-79e8fea4ff64. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1786.753116] env[62508]: DEBUG oslo_concurrency.lockutils [req-4afa3776-37e7-4b81-af39-909b81148fef req-96fea979-fe82-4f3c-bff5-5a79ab6897c4 service nova] Acquiring lock "refresh_cache-e7f521db-2dab-4c2c-bf2b-aa6e217f29bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1786.753116] env[62508]: DEBUG oslo_concurrency.lockutils [req-4afa3776-37e7-4b81-af39-909b81148fef req-96fea979-fe82-4f3c-bff5-5a79ab6897c4 service nova] Acquired lock "refresh_cache-e7f521db-2dab-4c2c-bf2b-aa6e217f29bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1786.753116] env[62508]: DEBUG nova.network.neutron [req-4afa3776-37e7-4b81-af39-909b81148fef req-96fea979-fe82-4f3c-bff5-5a79ab6897c4 service nova] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Refreshing network info cache for port 792efee6-358d-45ae-b4ff-79e8fea4ff64 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1786.888243] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776591, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.983923] env[62508]: DEBUG nova.scheduler.client.report [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1787.098770] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776586, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.387132] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776591, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.489303] env[62508]: DEBUG oslo_concurrency.lockutils [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.792s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.517030] env[62508]: INFO nova.scheduler.client.report [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Deleted allocations for instance 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3 [ 1787.530867] env[62508]: DEBUG nova.network.neutron [req-4afa3776-37e7-4b81-af39-909b81148fef req-96fea979-fe82-4f3c-bff5-5a79ab6897c4 service nova] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Updated VIF entry in instance network info cache for port 792efee6-358d-45ae-b4ff-79e8fea4ff64. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1787.531229] env[62508]: DEBUG nova.network.neutron [req-4afa3776-37e7-4b81-af39-909b81148fef req-96fea979-fe82-4f3c-bff5-5a79ab6897c4 service nova] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Updating instance_info_cache with network_info: [{"id": "792efee6-358d-45ae-b4ff-79e8fea4ff64", "address": "fa:16:3e:51:1a:93", "network": {"id": "65a331e8-0460-4acd-b98d-a2e2e25a0e72", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1387918119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a10752e7a94a038d1bebf4a2bf4986", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c06e3c2-8edb-4cf0-be6b-45dfe059c00b", "external-id": "nsx-vlan-transportzone-264", "segmentation_id": 264, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap792efee6-35", "ovs_interfaceid": "792efee6-358d-45ae-b4ff-79e8fea4ff64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1787.599989] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776586, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.604449] env[62508]: DEBUG oslo_concurrency.lockutils [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.604725] env[62508]: DEBUG oslo_concurrency.lockutils [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.604964] env[62508]: INFO nova.compute.manager [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Attaching volume 486f8980-1071-4c4e-aa7e-b41e69850aa9 to /dev/sdb [ 1787.645151] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c4534a8-3179-47aa-9458-8c42e3019725 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.653549] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91263ac6-858a-44be-bf1b-8321d50ae1ec {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.668957] env[62508]: DEBUG nova.virt.block_device [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updating existing volume attachment record: e87ae591-f133-42bc-8c70-b1209f287303 {{(pid=62508) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1787.715921] env[62508]: DEBUG nova.compute.manager [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1787.744352] env[62508]: DEBUG nova.virt.hardware [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1787.744664] env[62508]: DEBUG nova.virt.hardware [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1787.744867] env[62508]: DEBUG nova.virt.hardware [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1787.745339] env[62508]: DEBUG nova.virt.hardware [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1787.745339] env[62508]: DEBUG nova.virt.hardware [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1787.745470] env[62508]: DEBUG nova.virt.hardware [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1787.745687] env[62508]: DEBUG nova.virt.hardware [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1787.745887] env[62508]: DEBUG nova.virt.hardware [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1787.746098] env[62508]: DEBUG nova.virt.hardware [None 
req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1787.746307] env[62508]: DEBUG nova.virt.hardware [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1787.746532] env[62508]: DEBUG nova.virt.hardware [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1787.747797] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6750e9ff-9082-43b7-9ee5-79b351b6f96d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.759021] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f209b5b7-e19f-46cc-ab9b-f506e92403b6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.887196] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776591, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.027944] env[62508]: DEBUG oslo_concurrency.lockutils [None req-327bf491-9e6b-47e9-9525-ec6186957ad7 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.777s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1788.033836] env[62508]: DEBUG oslo_concurrency.lockutils [req-4afa3776-37e7-4b81-af39-909b81148fef req-96fea979-fe82-4f3c-bff5-5a79ab6897c4 service nova] Releasing lock "refresh_cache-e7f521db-2dab-4c2c-bf2b-aa6e217f29bd" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1788.098918] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776586, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.215805] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "5da47620-3979-44e8-91c5-154a1fe4ee48" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1788.216125] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "5da47620-3979-44e8-91c5-154a1fe4ee48" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1788.216360] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "5da47620-3979-44e8-91c5-154a1fe4ee48-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1788.216581] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "5da47620-3979-44e8-91c5-154a1fe4ee48-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1788.216843] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "5da47620-3979-44e8-91c5-154a1fe4ee48-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1788.219121] env[62508]: INFO nova.compute.manager [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Terminating instance [ 1788.221337] env[62508]: DEBUG nova.compute.manager [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1788.221410] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1788.222275] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a504561-1757-443b-a706-7062e41250d3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.231130] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1788.231422] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-638c7064-2b8a-42e0-b39b-25c1b3578194 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.239066] env[62508]: DEBUG oslo_vmware.api [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1788.239066] env[62508]: value = "task-1776604" [ 1788.239066] env[62508]: _type = "Task" [ 1788.239066] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.248186] env[62508]: DEBUG oslo_vmware.api [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776604, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.390132] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776591, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.603767] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776586, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.749743] env[62508]: DEBUG oslo_vmware.api [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776604, 'name': PowerOffVM_Task, 'duration_secs': 0.25394} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.750044] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1788.750219] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1788.750467] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d92d280-4ebd-4858-8de6-cca0660655b6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.895270] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "d3455694-a157-404f-8153-a9f96bac49a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1788.895515] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "d3455694-a157-404f-8153-a9f96bac49a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1788.896990] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776591, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.992328] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1788.992690] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1788.992935] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Deleting the datastore file [datastore1] 5da47620-3979-44e8-91c5-154a1fe4ee48 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1788.993302] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ad21723-5285-41d1-b864-0f5a4d4324a9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.005874] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1789.005874] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1789.005979] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Deleting the datastore file [datastore1] 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1789.007712] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fef5d5c1-7ef4-40b4-b7d8-1557551eee9b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.009974] env[62508]: DEBUG oslo_vmware.api [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1789.009974] env[62508]: value = "task-1776606" [ 1789.009974] env[62508]: _type = "Task" [ 1789.009974] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.017066] env[62508]: DEBUG oslo_vmware.api [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for the task: (returnval){ [ 1789.017066] env[62508]: value = "task-1776607" [ 1789.017066] env[62508]: _type = "Task" [ 1789.017066] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.024922] env[62508]: DEBUG oslo_vmware.api [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776606, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.030870] env[62508]: DEBUG oslo_vmware.api [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776607, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.102821] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776586, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.278896] env[62508]: DEBUG nova.compute.manager [req-9ae99cf4-e824-4252-9552-f24269e1ee10 req-8a0d29ba-d154-4553-9cac-e9cd7e5deac2 service nova] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Received event network-vif-plugged-cf175192-2f32-4a16-aa1c-26be6500c839 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1789.279163] env[62508]: DEBUG oslo_concurrency.lockutils [req-9ae99cf4-e824-4252-9552-f24269e1ee10 req-8a0d29ba-d154-4553-9cac-e9cd7e5deac2 service nova] Acquiring lock "3e79a6d4-8639-478e-8753-71ff0e07496f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.279389] env[62508]: DEBUG oslo_concurrency.lockutils [req-9ae99cf4-e824-4252-9552-f24269e1ee10 req-8a0d29ba-d154-4553-9cac-e9cd7e5deac2 service nova] Lock "3e79a6d4-8639-478e-8753-71ff0e07496f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.279629] env[62508]: DEBUG oslo_concurrency.lockutils [req-9ae99cf4-e824-4252-9552-f24269e1ee10 req-8a0d29ba-d154-4553-9cac-e9cd7e5deac2 service nova] Lock "3e79a6d4-8639-478e-8753-71ff0e07496f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.279836] env[62508]: DEBUG nova.compute.manager [req-9ae99cf4-e824-4252-9552-f24269e1ee10 req-8a0d29ba-d154-4553-9cac-e9cd7e5deac2 service nova] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] No waiting events found dispatching network-vif-plugged-cf175192-2f32-4a16-aa1c-26be6500c839 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1789.280174] env[62508]: WARNING 
nova.compute.manager [req-9ae99cf4-e824-4252-9552-f24269e1ee10 req-8a0d29ba-d154-4553-9cac-e9cd7e5deac2 service nova] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Received unexpected event network-vif-plugged-cf175192-2f32-4a16-aa1c-26be6500c839 for instance with vm_state building and task_state spawning. [ 1789.385526] env[62508]: DEBUG nova.network.neutron [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Successfully updated port: cf175192-2f32-4a16-aa1c-26be6500c839 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1789.393887] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776591, 'name': CreateVM_Task, 'duration_secs': 6.188612} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.394093] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1789.394803] env[62508]: DEBUG oslo_concurrency.lockutils [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1789.394964] env[62508]: DEBUG oslo_concurrency.lockutils [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1789.395295] env[62508]: DEBUG oslo_concurrency.lockutils [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1789.395705] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8b9ea12-e308-4553-a26b-de567fe1c55f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.397623] env[62508]: DEBUG nova.compute.manager [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1789.405955] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1789.405955] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52255098-810d-de14-f17f-0ef2506bdf6e" [ 1789.405955] env[62508]: _type = "Task" [ 1789.405955] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.416271] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52255098-810d-de14-f17f-0ef2506bdf6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.484169] env[62508]: DEBUG nova.compute.manager [req-4ac80277-76f1-4ced-8afe-265ebd1a2583 req-39f61d75-3f9a-4d4d-8eb7-c76147e5bbe9 service nova] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Received event network-vif-plugged-e0bb9906-0d2a-4bdb-bbe4-5a3074c66499 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1789.485010] env[62508]: DEBUG oslo_concurrency.lockutils [req-4ac80277-76f1-4ced-8afe-265ebd1a2583 req-39f61d75-3f9a-4d4d-8eb7-c76147e5bbe9 service nova] Acquiring lock "544d165c-5054-4c57-a5d9-ac69046c6fbc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.485213] env[62508]: DEBUG oslo_concurrency.lockutils [req-4ac80277-76f1-4ced-8afe-265ebd1a2583 req-39f61d75-3f9a-4d4d-8eb7-c76147e5bbe9 service nova] Lock "544d165c-5054-4c57-a5d9-ac69046c6fbc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.485393] env[62508]: DEBUG oslo_concurrency.lockutils [req-4ac80277-76f1-4ced-8afe-265ebd1a2583 req-39f61d75-3f9a-4d4d-8eb7-c76147e5bbe9 service nova] Lock "544d165c-5054-4c57-a5d9-ac69046c6fbc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.485563] env[62508]: DEBUG nova.compute.manager [req-4ac80277-76f1-4ced-8afe-265ebd1a2583 req-39f61d75-3f9a-4d4d-8eb7-c76147e5bbe9 service nova] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] No waiting events found dispatching network-vif-plugged-e0bb9906-0d2a-4bdb-bbe4-5a3074c66499 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1789.485731] env[62508]: WARNING nova.compute.manager [req-4ac80277-76f1-4ced-8afe-265ebd1a2583 req-39f61d75-3f9a-4d4d-8eb7-c76147e5bbe9 service nova] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Received unexpected event network-vif-plugged-e0bb9906-0d2a-4bdb-bbe4-5a3074c66499 for instance with vm_state building and task_state spawning. [ 1789.520695] env[62508]: DEBUG oslo_vmware.api [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776606, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.310168} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.523880] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1789.524094] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1789.524277] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1789.524449] env[62508]: INFO nova.compute.manager [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Took 1.30 seconds to destroy the instance on the hypervisor. [ 1789.524692] env[62508]: DEBUG oslo.service.loopingcall [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1789.524885] env[62508]: DEBUG nova.compute.manager [-] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1789.524979] env[62508]: DEBUG nova.network.neutron [-] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1789.532378] env[62508]: DEBUG oslo_vmware.api [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Task: {'id': task-1776607, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.341702} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.533177] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1789.533177] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1789.533177] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1789.533177] env[62508]: INFO nova.compute.manager [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Took 6.84 seconds to destroy the instance on the hypervisor. [ 1789.533417] env[62508]: DEBUG oslo.service.loopingcall [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1789.533515] env[62508]: DEBUG nova.compute.manager [-] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1789.533610] env[62508]: DEBUG nova.network.neutron [-] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1789.601571] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776586, 'name': CreateVM_Task, 'duration_secs': 7.536224} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.601744] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1789.602410] env[62508]: DEBUG oslo_concurrency.lockutils [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1789.630575] env[62508]: DEBUG nova.network.neutron [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Successfully updated port: e0bb9906-0d2a-4bdb-bbe4-5a3074c66499 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1789.887639] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Acquiring lock "refresh_cache-3e79a6d4-8639-478e-8753-71ff0e07496f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1789.887928] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Acquired lock "refresh_cache-3e79a6d4-8639-478e-8753-71ff0e07496f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1789.887928] env[62508]: DEBUG nova.network.neutron [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1789.918414] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52255098-810d-de14-f17f-0ef2506bdf6e, 'name': SearchDatastore_Task, 'duration_secs': 0.010701} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.918829] env[62508]: DEBUG oslo_concurrency.lockutils [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1789.919145] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1789.919438] env[62508]: DEBUG oslo_concurrency.lockutils [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1789.919636] env[62508]: DEBUG oslo_concurrency.lockutils [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1789.919843] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1789.920184] env[62508]: DEBUG oslo_concurrency.lockutils [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1789.920539] env[62508]: DEBUG oslo_concurrency.lockutils [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1789.920798] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0c151eaf-61c8-42c9-8c0c-91878096cd12 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.923890] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.924171] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.925838] env[62508]: INFO nova.compute.claims [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1789.928558] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-974a0d7c-7586-4e63-9f91-03760bf74523 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.935336] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1789.935336] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d74f54-8f22-9d21-2401-da9b5aa54e7f" [ 1789.935336] env[62508]: _type = "Task" [ 1789.935336] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.939830] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1789.940046] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1789.941124] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9457362-3af9-4ffa-8aba-a3f5391947d3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.946576] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d74f54-8f22-9d21-2401-da9b5aa54e7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.949853] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1789.949853] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ed8a69-c13a-ee14-5ced-6b32a9c0b17c" [ 1789.949853] env[62508]: _type = "Task" [ 1789.949853] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.958932] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ed8a69-c13a-ee14-5ced-6b32a9c0b17c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.136534] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1790.136686] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1790.136865] env[62508]: DEBUG nova.network.neutron [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1790.356749] env[62508]: DEBUG nova.network.neutron [-] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1790.388435] env[62508]: DEBUG nova.network.neutron [-] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1790.423724] env[62508]: DEBUG nova.network.neutron [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1790.447990] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d74f54-8f22-9d21-2401-da9b5aa54e7f, 'name': SearchDatastore_Task, 'duration_secs': 0.010457} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.448323] env[62508]: DEBUG oslo_concurrency.lockutils [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1790.448559] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1790.449705] env[62508]: DEBUG oslo_concurrency.lockutils [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1790.460228] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ed8a69-c13a-ee14-5ced-6b32a9c0b17c, 'name': SearchDatastore_Task, 'duration_secs': 0.010157} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.461291] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b5f2156-d183-43e7-bc04-9266c28ac5c0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.467030] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1790.467030] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5223633b-1aa8-264e-2fb9-c8642712d579" [ 1790.467030] env[62508]: _type = "Task" [ 1790.467030] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.479656] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5223633b-1aa8-264e-2fb9-c8642712d579, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.576171] env[62508]: DEBUG nova.network.neutron [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Updating instance_info_cache with network_info: [{"id": "cf175192-2f32-4a16-aa1c-26be6500c839", "address": "fa:16:3e:72:cf:56", "network": {"id": "9e998fc6-a785-482d-a849-d43efe7b8f86", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-426136197-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2aaa479b08b4ad29d8ce07da5802e3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0b29c52-62b0-4a9e-8e1c-41cf6ac8b916", "external-id": "nsx-vlan-transportzone-143", "segmentation_id": 143, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf175192-2f", "ovs_interfaceid": "cf175192-2f32-4a16-aa1c-26be6500c839", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1790.698604] env[62508]: DEBUG nova.network.neutron [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1790.835701] env[62508]: DEBUG nova.network.neutron [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updating instance_info_cache with network_info: [{"id": "e0bb9906-0d2a-4bdb-bbe4-5a3074c66499", "address": "fa:16:3e:50:39:a4", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0bb9906-0d", "ovs_interfaceid": "e0bb9906-0d2a-4bdb-bbe4-5a3074c66499", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1790.858137] env[62508]: INFO nova.compute.manager [-] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Took 1.33 seconds to deallocate network for instance. [ 1790.891603] env[62508]: INFO nova.compute.manager [-] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Took 1.36 seconds to deallocate network for instance. [ 1790.979817] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5223633b-1aa8-264e-2fb9-c8642712d579, 'name': SearchDatastore_Task, 'duration_secs': 0.011437} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.980127] env[62508]: DEBUG oslo_concurrency.lockutils [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1790.980402] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a15f3cef-c260-4a54-83af-7cccf81e15a6/a15f3cef-c260-4a54-83af-7cccf81e15a6.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1790.980844] env[62508]: DEBUG oslo_concurrency.lockutils [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1790.981072] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1790.981320] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2947bf78-b743-4b1b-b377-d655d3b86321 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.983783] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-76e61fc3-3dfe-4059-ba5f-297059f47723 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.992248] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1790.992248] env[62508]: value = "task-1776609" [ 1790.992248] env[62508]: _type = "Task" [ 1790.992248] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.996754] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1790.996964] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1790.998144] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86a8507d-87d3-4ad8-bc09-84302c5e9689 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.007184] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776609, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.012468] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1791.012468] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520ffd98-68e5-bda5-c95c-c2575cef4c0a" [ 1791.012468] env[62508]: _type = "Task" [ 1791.012468] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.023380] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520ffd98-68e5-bda5-c95c-c2575cef4c0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.079677] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Releasing lock "refresh_cache-3e79a6d4-8639-478e-8753-71ff0e07496f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1791.080076] env[62508]: DEBUG nova.compute.manager [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Instance network_info: |[{"id": "cf175192-2f32-4a16-aa1c-26be6500c839", "address": "fa:16:3e:72:cf:56", "network": {"id": "9e998fc6-a785-482d-a849-d43efe7b8f86", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-426136197-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2aaa479b08b4ad29d8ce07da5802e3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0b29c52-62b0-4a9e-8e1c-41cf6ac8b916", "external-id": "nsx-vlan-transportzone-143", "segmentation_id": 143, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf175192-2f", "ovs_interfaceid": "cf175192-2f32-4a16-aa1c-26be6500c839", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1791.080555] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:cf:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0b29c52-62b0-4a9e-8e1c-41cf6ac8b916', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf175192-2f32-4a16-aa1c-26be6500c839', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1791.089249] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Creating folder: Project (d2aaa479b08b4ad29d8ce07da5802e3e). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1791.092235] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-41de6a31-368c-476a-a80d-f540bb74e582 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.108345] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Created folder: Project (d2aaa479b08b4ad29d8ce07da5802e3e) in parent group-v368536. [ 1791.108570] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Creating folder: Instances. Parent ref: group-v368814. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1791.108835] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-820026b4-0d5d-481b-b22e-7d3b9814b2a3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.123466] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Created folder: Instances in parent group-v368814. [ 1791.123843] env[62508]: DEBUG oslo.service.loopingcall [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1791.124148] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1791.124465] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9aec198d-b163-45ff-a093-2b95c6f59037 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.153029] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1791.153029] env[62508]: value = "task-1776612" [ 1791.153029] env[62508]: _type = "Task" [ 1791.153029] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.166789] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776612, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.174838] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ffa01b-32da-482e-9047-a8da027c7acd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.183044] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594eaa35-6c40-4622-90cc-99885f129132 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.217843] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1042031-6145-4ebc-9fa5-79175f7a236a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.226337] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-419dd974-e054-42c1-9ad3-4ebfc058852e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.242395] env[62508]: DEBUG nova.compute.provider_tree [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1791.319618] env[62508]: DEBUG nova.compute.manager [req-2b2711f5-6907-4f52-895f-3d849dfbc2ae req-b66f18a4-1615-4f80-93e0-3b3b7bf885d8 service nova] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Received event network-changed-cf175192-2f32-4a16-aa1c-26be6500c839 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1791.319731] env[62508]: DEBUG nova.compute.manager [req-2b2711f5-6907-4f52-895f-3d849dfbc2ae req-b66f18a4-1615-4f80-93e0-3b3b7bf885d8 service nova] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Refreshing instance network info cache due to event network-changed-cf175192-2f32-4a16-aa1c-26be6500c839. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1791.319960] env[62508]: DEBUG oslo_concurrency.lockutils [req-2b2711f5-6907-4f52-895f-3d849dfbc2ae req-b66f18a4-1615-4f80-93e0-3b3b7bf885d8 service nova] Acquiring lock "refresh_cache-3e79a6d4-8639-478e-8753-71ff0e07496f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1791.320113] env[62508]: DEBUG oslo_concurrency.lockutils [req-2b2711f5-6907-4f52-895f-3d849dfbc2ae req-b66f18a4-1615-4f80-93e0-3b3b7bf885d8 service nova] Acquired lock "refresh_cache-3e79a6d4-8639-478e-8753-71ff0e07496f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1791.320275] env[62508]: DEBUG nova.network.neutron [req-2b2711f5-6907-4f52-895f-3d849dfbc2ae req-b66f18a4-1615-4f80-93e0-3b3b7bf885d8 service nova] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Refreshing network info cache for port cf175192-2f32-4a16-aa1c-26be6500c839 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1791.338259] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1791.338597] env[62508]: DEBUG nova.compute.manager [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Instance network_info: |[{"id": "e0bb9906-0d2a-4bdb-bbe4-5a3074c66499", "address": "fa:16:3e:50:39:a4", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0bb9906-0d", "ovs_interfaceid": "e0bb9906-0d2a-4bdb-bbe4-5a3074c66499", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1791.339338] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:39:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8140829-5eac-40d8-a10c-eb881f57affc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'e0bb9906-0d2a-4bdb-bbe4-5a3074c66499', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1791.347241] env[62508]: DEBUG oslo.service.loopingcall [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1791.347534] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1791.348176] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-549ab966-73d6-47f3-8513-a6e8cd775560 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.364697] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1791.372602] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1791.372602] env[62508]: value = "task-1776613" [ 1791.372602] env[62508]: _type = "Task" [ 1791.372602] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.383082] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776613, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.399704] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1791.506076] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776609, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.525971] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520ffd98-68e5-bda5-c95c-c2575cef4c0a, 'name': SearchDatastore_Task, 'duration_secs': 0.011539} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.528980] env[62508]: DEBUG nova.compute.manager [req-d5fc4662-af98-4976-a290-fabebe7a75ae req-3a9b14fa-8426-4a3c-a2b0-95cafab9fc0c service nova] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Received event network-changed-e0bb9906-0d2a-4bdb-bbe4-5a3074c66499 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1791.529201] env[62508]: DEBUG nova.compute.manager [req-d5fc4662-af98-4976-a290-fabebe7a75ae req-3a9b14fa-8426-4a3c-a2b0-95cafab9fc0c service nova] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Refreshing instance network info cache due to event network-changed-e0bb9906-0d2a-4bdb-bbe4-5a3074c66499. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1791.529830] env[62508]: DEBUG oslo_concurrency.lockutils [req-d5fc4662-af98-4976-a290-fabebe7a75ae req-3a9b14fa-8426-4a3c-a2b0-95cafab9fc0c service nova] Acquiring lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1791.529993] env[62508]: DEBUG oslo_concurrency.lockutils [req-d5fc4662-af98-4976-a290-fabebe7a75ae req-3a9b14fa-8426-4a3c-a2b0-95cafab9fc0c service nova] Acquired lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1791.530179] env[62508]: DEBUG nova.network.neutron [req-d5fc4662-af98-4976-a290-fabebe7a75ae req-3a9b14fa-8426-4a3c-a2b0-95cafab9fc0c service nova] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Refreshing network info cache for port e0bb9906-0d2a-4bdb-bbe4-5a3074c66499 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1791.532059] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33242303-3b84-4773-9ad5-e4592388d800 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.540516] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1791.540516] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525ab638-d455-9577-1ed8-a469338891ac" [ 1791.540516] env[62508]: _type = "Task" [ 1791.540516] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.556106] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525ab638-d455-9577-1ed8-a469338891ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.665863] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776612, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.746589] env[62508]: DEBUG nova.scheduler.client.report [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1791.883980] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776613, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.006999] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776609, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.653315} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.007370] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a15f3cef-c260-4a54-83af-7cccf81e15a6/a15f3cef-c260-4a54-83af-7cccf81e15a6.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1792.008054] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1792.008054] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c166819e-6721-4d95-a673-c21d76105f2b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.016930] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1792.016930] env[62508]: value = "task-1776614" [ 1792.016930] env[62508]: _type = "Task" [ 1792.016930] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.027511] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776614, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.051443] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525ab638-d455-9577-1ed8-a469338891ac, 'name': SearchDatastore_Task, 'duration_secs': 0.059602} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.051738] env[62508]: DEBUG oslo_concurrency.lockutils [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.052032] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 0a4958d5-b9a9-4854-90ca-f19eb34cb15b/0a4958d5-b9a9-4854-90ca-f19eb34cb15b.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1792.052312] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f264154d-ec15-486d-bb13-35554b969d2a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.062166] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1792.062166] env[62508]: value = "task-1776615" [ 1792.062166] env[62508]: _type = "Task" [ 1792.062166] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.074380] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776615, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.110450] env[62508]: DEBUG nova.network.neutron [req-2b2711f5-6907-4f52-895f-3d849dfbc2ae req-b66f18a4-1615-4f80-93e0-3b3b7bf885d8 service nova] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Updated VIF entry in instance network info cache for port cf175192-2f32-4a16-aa1c-26be6500c839. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1792.110450] env[62508]: DEBUG nova.network.neutron [req-2b2711f5-6907-4f52-895f-3d849dfbc2ae req-b66f18a4-1615-4f80-93e0-3b3b7bf885d8 service nova] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Updating instance_info_cache with network_info: [{"id": "cf175192-2f32-4a16-aa1c-26be6500c839", "address": "fa:16:3e:72:cf:56", "network": {"id": "9e998fc6-a785-482d-a849-d43efe7b8f86", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-426136197-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2aaa479b08b4ad29d8ce07da5802e3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0b29c52-62b0-4a9e-8e1c-41cf6ac8b916", "external-id": "nsx-vlan-transportzone-143", "segmentation_id": 143, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf175192-2f", "ovs_interfaceid": "cf175192-2f32-4a16-aa1c-26be6500c839", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.170361] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776612, 'name': CreateVM_Task, 'duration_secs': 0.659838} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.174041] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1792.175113] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.175349] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.175823] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1792.176859] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e3ad66b-cba0-4779-98ec-3ffecf1092d0 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.185193] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Waiting for the task: (returnval){ [ 1792.185193] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528c59cb-54fc-3e4b-ff86-0ce033ec6b94" [ 1792.185193] env[62508]: _type = "Task" [ 1792.185193] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.201652] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528c59cb-54fc-3e4b-ff86-0ce033ec6b94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.252147] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.328s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1792.252554] env[62508]: DEBUG nova.compute.manager [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1792.259084] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.894s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1792.259465] env[62508]: DEBUG nova.objects.instance [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lazy-loading 'resources' on Instance uuid 5da47620-3979-44e8-91c5-154a1fe4ee48 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1792.348628] env[62508]: DEBUG nova.network.neutron [req-d5fc4662-af98-4976-a290-fabebe7a75ae req-3a9b14fa-8426-4a3c-a2b0-95cafab9fc0c service nova] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updated VIF entry in instance network info cache for port e0bb9906-0d2a-4bdb-bbe4-5a3074c66499. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1792.349032] env[62508]: DEBUG nova.network.neutron [req-d5fc4662-af98-4976-a290-fabebe7a75ae req-3a9b14fa-8426-4a3c-a2b0-95cafab9fc0c service nova] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updating instance_info_cache with network_info: [{"id": "e0bb9906-0d2a-4bdb-bbe4-5a3074c66499", "address": "fa:16:3e:50:39:a4", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0bb9906-0d", "ovs_interfaceid": "e0bb9906-0d2a-4bdb-bbe4-5a3074c66499", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.384541] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776613, 'name': CreateVM_Task, 'duration_secs': 0.605897} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.384821] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1792.385449] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.527114] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776614, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.11166} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.527371] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1792.528175] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ca24f6-94a1-4c8e-bf27-98623814454c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.552017] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] a15f3cef-c260-4a54-83af-7cccf81e15a6/a15f3cef-c260-4a54-83af-7cccf81e15a6.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1792.552310] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-47d1b231-1049-4eb7-be96-41c186634ec4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.576717] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776615, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.578153] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1792.578153] env[62508]: value = "task-1776616" [ 1792.578153] env[62508]: _type = "Task" [ 1792.578153] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.587279] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776616, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.616748] env[62508]: DEBUG oslo_concurrency.lockutils [req-2b2711f5-6907-4f52-895f-3d849dfbc2ae req-b66f18a4-1615-4f80-93e0-3b3b7bf885d8 service nova] Releasing lock "refresh_cache-3e79a6d4-8639-478e-8753-71ff0e07496f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.616748] env[62508]: DEBUG nova.compute.manager [req-2b2711f5-6907-4f52-895f-3d849dfbc2ae req-b66f18a4-1615-4f80-93e0-3b3b7bf885d8 service nova] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Received event network-vif-deleted-65b1bfe7-bc3c-4538-9aab-563919b69ba3 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1792.697133] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528c59cb-54fc-3e4b-ff86-0ce033ec6b94, 'name': SearchDatastore_Task, 'duration_secs': 0.083049} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.697133] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.697133] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1792.697133] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.697417] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.697494] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1792.697965] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.698306] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1792.698541] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f30d86d-adaa-4719-9a10-271dad8157da {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.700639] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5cc441d-39a6-4203-9500-a1ab784a28fa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.706386] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1792.706386] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5270fdbf-3fa8-8d5f-ec3a-7752dcaaeae5" [ 1792.706386] env[62508]: _type = "Task" [ 1792.706386] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.711239] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1792.711458] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1792.712555] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ac58a72-03d2-4edc-b095-e0f18aa7cf97 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.717903] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5270fdbf-3fa8-8d5f-ec3a-7752dcaaeae5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.718947] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Volume attach. 
Driver type: vmdk {{(pid=62508) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1792.719174] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368813', 'volume_id': '486f8980-1071-4c4e-aa7e-b41e69850aa9', 'name': 'volume-486f8980-1071-4c4e-aa7e-b41e69850aa9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2aeb5a4c-785a-4238-8575-ecd1ff84b97c', 'attached_at': '', 'detached_at': '', 'volume_id': '486f8980-1071-4c4e-aa7e-b41e69850aa9', 'serial': '486f8980-1071-4c4e-aa7e-b41e69850aa9'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1792.720459] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d08237a-67df-47a0-8245-5b694ed671e0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.724310] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Waiting for the task: (returnval){ [ 1792.724310] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52486739-eaec-68d0-1771-242a4a8d0952" [ 1792.724310] env[62508]: _type = "Task" [ 1792.724310] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.744552] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e4d696-6f26-4342-b723-72dda19c79c7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.753778] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52486739-eaec-68d0-1771-242a4a8d0952, 'name': SearchDatastore_Task, 'duration_secs': 0.009633} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.755472] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02c68d77-5a9c-4c32-89cf-cdc13ff08471 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.774950] env[62508]: DEBUG nova.compute.utils [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1792.789156] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] volume-486f8980-1071-4c4e-aa7e-b41e69850aa9/volume-486f8980-1071-4c4e-aa7e-b41e69850aa9.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1792.791343] env[62508]: DEBUG nova.compute.manager [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1792.794569] env[62508]: DEBUG nova.compute.manager [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1792.794801] env[62508]: DEBUG nova.network.neutron [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1792.796810] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-445b2839-2830-4bce-8ca1-2d70361151fb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.813864] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Waiting for the task: (returnval){ [ 1792.813864] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ce5334-d9b8-368e-5fc4-904834573560" [ 1792.813864] env[62508]: _type = "Task" [ 1792.813864] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.824929] env[62508]: DEBUG oslo_vmware.api [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1792.824929] env[62508]: value = "task-1776617" [ 1792.824929] env[62508]: _type = "Task" [ 1792.824929] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.834090] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ce5334-d9b8-368e-5fc4-904834573560, 'name': SearchDatastore_Task, 'duration_secs': 0.011752} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.834994] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.835415] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 3e79a6d4-8639-478e-8753-71ff0e07496f/3e79a6d4-8639-478e-8753-71ff0e07496f.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1792.838795] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b029ea95-8d65-4559-9377-38268e53d01d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.847238] env[62508]: DEBUG oslo_vmware.api [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776617, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.851679] env[62508]: DEBUG oslo_concurrency.lockutils [req-d5fc4662-af98-4976-a290-fabebe7a75ae req-3a9b14fa-8426-4a3c-a2b0-95cafab9fc0c service nova] Releasing lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.852170] env[62508]: DEBUG nova.compute.manager [req-d5fc4662-af98-4976-a290-fabebe7a75ae req-3a9b14fa-8426-4a3c-a2b0-95cafab9fc0c service nova] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Received event network-vif-deleted-985905ec-2a79-4b7a-b4ad-d3bf00a42f43 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1792.855361] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Waiting for the task: (returnval){ [ 1792.855361] env[62508]: value = "task-1776618" [ 1792.855361] env[62508]: _type = "Task" [ 1792.855361] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.856765] env[62508]: DEBUG nova.policy [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '285fedd2e7fd4d259ca7fc57c3fcbf46', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74c45615efbb425fbec8400f6d225892', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1792.872326] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': task-1776618, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.054482] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6e20c8-11c5-4480-a4fc-3cf18c984bfe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.064408] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-058409c3-71d5-42c7-88a1-7bb9a505e6dc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.076541] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776615, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.106263] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514a63e2-2eb0-4f6a-a917-0417db7c8768 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.115288] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776616, 'name': ReconfigVM_Task, 'duration_secs': 0.458032} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.117624] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Reconfigured VM instance instance-00000062 to attach disk [datastore1] a15f3cef-c260-4a54-83af-7cccf81e15a6/a15f3cef-c260-4a54-83af-7cccf81e15a6.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1793.118416] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cea3da43-3b8b-46fc-be01-896416fa5467 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.121428] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed421eff-27b6-47dc-bfe5-6de3a4339111 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.138737] env[62508]: DEBUG nova.compute.provider_tree [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1793.141700] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1793.141700] env[62508]: value = "task-1776619" [ 1793.141700] env[62508]: _type = "Task" [ 1793.141700] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.151937] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776619, 'name': Rename_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.220394] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5270fdbf-3fa8-8d5f-ec3a-7752dcaaeae5, 'name': SearchDatastore_Task, 'duration_secs': 0.010877} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.221110] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1793.221425] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1793.221730] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1793.221924] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.222191] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1793.222536] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ecb47801-45b8-4a26-a648-50fb2f204119 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.242620] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1793.242928] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1793.243898] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4b31c1e-e0ea-42d1-bd40-c5649c6414fd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.254025] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1793.254025] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d07112-c8a3-d358-d1a0-08b030c9383c" [ 1793.254025] env[62508]: _type = "Task" [ 1793.254025] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.262911] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d07112-c8a3-d358-d1a0-08b030c9383c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.284780] env[62508]: DEBUG nova.network.neutron [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Successfully created port: ed5b1d50-d456-43d1-887a-96dcb4f42cec {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1793.314536] env[62508]: INFO nova.virt.block_device [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Booting with volume 3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2 at /dev/sda [ 1793.338429] env[62508]: DEBUG oslo_vmware.api [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776617, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.364824] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-31b8b6b8-49d5-4692-a333-d8b2256c5997 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.375758] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': task-1776618, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.380814] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b672df67-fcf1-44be-8513-6e3b125cb74f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.418719] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-edf78c87-b0c2-4285-88cb-986efb42866e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.430346] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa66f9ef-b6e1-4391-baff-a2f9ef060ccd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.470293] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ec4685-8381-48e3-a812-342f2e323b1f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.479271] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f12cb25-e3f7-47d2-9b4c-c0c649ccfa02 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.497039] env[62508]: DEBUG nova.virt.block_device [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Updating existing volume attachment record: 6ecda9fb-bfe1-48cf-a5b1-a0439cefcacb {{(pid=62508) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1793.583623] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776615, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.644361] env[62508]: DEBUG nova.scheduler.client.report [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1793.657715] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776619, 'name': Rename_Task, 'duration_secs': 0.160425} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.657715] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1793.657715] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-748a72e6-1d71-42e1-b220-8787a61aa728 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.670288] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1793.670288] env[62508]: value = "task-1776620" [ 1793.670288] env[62508]: _type = "Task" [ 1793.670288] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.681030] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776620, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.764469] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d07112-c8a3-d358-d1a0-08b030c9383c, 'name': SearchDatastore_Task, 'duration_secs': 0.072018} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.765299] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e3a96d9-42bc-4cc4-80ca-bcec69778fc0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.771526] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1793.771526] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c20899-5f29-07a5-34f7-de956bccf008" [ 1793.771526] env[62508]: _type = "Task" [ 1793.771526] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.782751] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c20899-5f29-07a5-34f7-de956bccf008, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.836310] env[62508]: DEBUG oslo_vmware.api [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776617, 'name': ReconfigVM_Task, 'duration_secs': 0.532636} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.836588] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Reconfigured VM instance instance-0000005b to attach disk [datastore1] volume-486f8980-1071-4c4e-aa7e-b41e69850aa9/volume-486f8980-1071-4c4e-aa7e-b41e69850aa9.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1793.841611] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74868e37-ef28-4bb1-83e7-70e4415b13ab {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.856572] env[62508]: DEBUG oslo_vmware.api [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1793.856572] env[62508]: value = "task-1776621" [ 1793.856572] env[62508]: _type = "Task" [ 1793.856572] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.867760] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': task-1776618, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.928773} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.870951] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 3e79a6d4-8639-478e-8753-71ff0e07496f/3e79a6d4-8639-478e-8753-71ff0e07496f.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1793.871188] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1793.871479] env[62508]: DEBUG oslo_vmware.api [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776621, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.871692] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4f5d27a4-5544-4afa-a52b-2f628a420f7f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.879123] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Waiting for the task: (returnval){ [ 1793.879123] env[62508]: value = "task-1776622" [ 1793.879123] env[62508]: _type = "Task" [ 1793.879123] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.886729] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': task-1776622, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.079012] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776615, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.7091} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.079382] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 0a4958d5-b9a9-4854-90ca-f19eb34cb15b/0a4958d5-b9a9-4854-90ca-f19eb34cb15b.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1794.079659] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1794.079975] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd328474-55e9-4720-ad28-867f67eabf0b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.088015] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1794.088015] env[62508]: value = "task-1776623" [ 1794.088015] env[62508]: _type = "Task" [ 1794.088015] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.096636] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776623, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.151402] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.892s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1794.154519] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.754s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.154815] env[62508]: DEBUG nova.objects.instance [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lazy-loading 'resources' on Instance uuid 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1794.171055] env[62508]: INFO nova.scheduler.client.report [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Deleted allocations for instance 5da47620-3979-44e8-91c5-154a1fe4ee48 [ 1794.188807] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776620, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.284276] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c20899-5f29-07a5-34f7-de956bccf008, 'name': SearchDatastore_Task, 'duration_secs': 0.010585} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.284681] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.284983] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 544d165c-5054-4c57-a5d9-ac69046c6fbc/544d165c-5054-4c57-a5d9-ac69046c6fbc.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1794.285395] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5feda00a-0d8a-4cc8-80b7-04b9c93e5b31 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.296168] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1794.296168] env[62508]: value = "task-1776624" [ 1794.296168] env[62508]: _type = "Task" [ 1794.296168] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.305954] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776624, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.368604] env[62508]: DEBUG oslo_vmware.api [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776621, 'name': ReconfigVM_Task, 'duration_secs': 0.187563} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.368992] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368813', 'volume_id': '486f8980-1071-4c4e-aa7e-b41e69850aa9', 'name': 'volume-486f8980-1071-4c4e-aa7e-b41e69850aa9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2aeb5a4c-785a-4238-8575-ecd1ff84b97c', 'attached_at': '', 'detached_at': '', 'volume_id': '486f8980-1071-4c4e-aa7e-b41e69850aa9', 'serial': '486f8980-1071-4c4e-aa7e-b41e69850aa9'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1794.388603] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': task-1776622, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069838} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.388872] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1794.389754] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c8094f-928e-4b63-82be-5cd48a3dcb2b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.423118] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 3e79a6d4-8639-478e-8753-71ff0e07496f/3e79a6d4-8639-478e-8753-71ff0e07496f.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1794.423458] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58b06c2a-80b5-4373-a3c3-0f68869113ae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.445570] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Waiting for the task: (returnval){ [ 1794.445570] env[62508]: value = "task-1776625" [ 1794.445570] env[62508]: _type = "Task" [ 1794.445570] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.454840] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': task-1776625, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.603660] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776623, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075439} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.604107] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1794.605110] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c08247-a08a-4726-84e0-9d4b4c9563f3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.638706] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 0a4958d5-b9a9-4854-90ca-f19eb34cb15b/0a4958d5-b9a9-4854-90ca-f19eb34cb15b.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1794.639378] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-505c22d1-3ed8-4458-82cd-30c8972ffdab {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.668775] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1794.668775] env[62508]: value = "task-1776626" [ 1794.668775] env[62508]: _type = "Task" [ 1794.668775] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.692347] env[62508]: DEBUG oslo_vmware.api [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776620, 'name': PowerOnVM_Task, 'duration_secs': 0.625531} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.692587] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776626, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.693292] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4a5cc177-dc77-4c7d-9ae5-6cb6c2f11605 tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "5da47620-3979-44e8-91c5-154a1fe4ee48" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.477s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1794.694429] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1794.694429] env[62508]: INFO nova.compute.manager [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Took 14.53 seconds to spawn the instance on the hypervisor. [ 1794.694562] env[62508]: DEBUG nova.compute.manager [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1794.695701] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1fc058d-e56c-4f1d-9f27-6a3dd43ec3d7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.806702] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776624, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508478} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.806991] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 544d165c-5054-4c57-a5d9-ac69046c6fbc/544d165c-5054-4c57-a5d9-ac69046c6fbc.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1794.807403] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1794.807712] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-163804af-5944-4fa3-8ba0-48ca91022452 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.817857] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1794.817857] env[62508]: value = "task-1776627" [ 1794.817857] env[62508]: _type = "Task" [ 1794.817857] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.826177] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776627, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.927147] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d579b2-5ce0-424d-bd7a-1d3f94159ea0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.933398] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5caac78-b509-4b3f-928b-94e7ce589ee4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.969111] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496d2b26-5f15-4af4-ae27-484a7de22fde {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.978288] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': task-1776625, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.981797] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd23031-5875-4c29-9565-7c0b1c15aae3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.997437] env[62508]: DEBUG nova.compute.provider_tree [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1795.038952] env[62508]: DEBUG nova.compute.manager [req-ecdf50ec-11bc-47e4-8d62-b8b37baf433d req-1260abd7-898b-4a7a-8215-e33649e05d73 service nova] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Received event network-vif-plugged-ed5b1d50-d456-43d1-887a-96dcb4f42cec {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1795.039192] env[62508]: DEBUG oslo_concurrency.lockutils [req-ecdf50ec-11bc-47e4-8d62-b8b37baf433d req-1260abd7-898b-4a7a-8215-e33649e05d73 service nova] Acquiring lock "d3455694-a157-404f-8153-a9f96bac49a2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.039446] env[62508]: DEBUG oslo_concurrency.lockutils [req-ecdf50ec-11bc-47e4-8d62-b8b37baf433d req-1260abd7-898b-4a7a-8215-e33649e05d73 service nova] Lock "d3455694-a157-404f-8153-a9f96bac49a2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.039643] env[62508]: DEBUG oslo_concurrency.lockutils [req-ecdf50ec-11bc-47e4-8d62-b8b37baf433d req-1260abd7-898b-4a7a-8215-e33649e05d73 service nova] Lock "d3455694-a157-404f-8153-a9f96bac49a2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.039953] env[62508]: DEBUG nova.compute.manager [req-ecdf50ec-11bc-47e4-8d62-b8b37baf433d req-1260abd7-898b-4a7a-8215-e33649e05d73 service nova] [instance: d3455694-a157-404f-8153-a9f96bac49a2] No waiting events found dispatching network-vif-plugged-ed5b1d50-d456-43d1-887a-96dcb4f42cec {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1795.040180] env[62508]: WARNING nova.compute.manager [req-ecdf50ec-11bc-47e4-8d62-b8b37baf433d req-1260abd7-898b-4a7a-8215-e33649e05d73 service nova] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Received unexpected event network-vif-plugged-ed5b1d50-d456-43d1-887a-96dcb4f42cec for instance with vm_state building and task_state block_device_mapping. 
[ 1795.179754] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776626, 'name': ReconfigVM_Task, 'duration_secs': 0.376128} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.180114] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 0a4958d5-b9a9-4854-90ca-f19eb34cb15b/0a4958d5-b9a9-4854-90ca-f19eb34cb15b.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1795.180687] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dce349ea-f38b-4c38-829a-a29b4a750e3b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.186992] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1795.186992] env[62508]: value = "task-1776628" [ 1795.186992] env[62508]: _type = "Task" [ 1795.186992] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.195454] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776628, 'name': Rename_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.207494] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "b83dd148-8cf6-474b-bb19-e0822732b12a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.207494] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "b83dd148-8cf6-474b-bb19-e0822732b12a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.207764] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "b83dd148-8cf6-474b-bb19-e0822732b12a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.207974] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "b83dd148-8cf6-474b-bb19-e0822732b12a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.208154] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "b83dd148-8cf6-474b-bb19-e0822732b12a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.210643] env[62508]: INFO nova.compute.manager [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Terminating instance [ 1795.212565] env[62508]: DEBUG nova.compute.manager [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1795.212806] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1795.213833] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e401c45-2a0a-4ef1-bc62-b2c25d05125e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.224748] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1795.224998] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6196f1f-a84f-40b7-beec-35a138f82c6f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.229127] env[62508]: INFO nova.compute.manager [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Took 30.53 seconds to build instance. [ 1795.235924] env[62508]: DEBUG oslo_vmware.api [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1795.235924] env[62508]: value = "task-1776629" [ 1795.235924] env[62508]: _type = "Task" [ 1795.235924] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.245199] env[62508]: DEBUG oslo_vmware.api [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776629, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.258169] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "24091abb-f71f-4528-8fc5-b97725cf079e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.258169] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "24091abb-f71f-4528-8fc5-b97725cf079e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.258169] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "24091abb-f71f-4528-8fc5-b97725cf079e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.258373] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "24091abb-f71f-4528-8fc5-b97725cf079e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.258455] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "24091abb-f71f-4528-8fc5-b97725cf079e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.261320] env[62508]: INFO nova.compute.manager [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Terminating instance [ 1795.263283] env[62508]: DEBUG nova.compute.manager [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1795.263499] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1795.264402] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c7549ac-9caf-4d0f-8093-174e1ec7f1db {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.272871] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1795.273043] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93576ee9-adc8-47b9-a520-ffe08ef17549 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.280473] env[62508]: DEBUG oslo_vmware.api [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1795.280473] env[62508]: value = "task-1776630" [ 1795.280473] env[62508]: _type = "Task" [ 1795.280473] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.290227] env[62508]: DEBUG oslo_vmware.api [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776630, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.327416] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776627, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076312} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.327772] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1795.328662] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7b58b8-840f-4c8a-9862-8b7731fc2b5f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.353233] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 544d165c-5054-4c57-a5d9-ac69046c6fbc/544d165c-5054-4c57-a5d9-ac69046c6fbc.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1795.353598] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42680dd1-9921-46d6-a084-cb0d0c317fb8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.375506] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1795.375506] env[62508]: value = "task-1776631" [ 1795.375506] env[62508]: _type = "Task" [ 1795.375506] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.384381] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776631, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.423560] env[62508]: DEBUG nova.objects.instance [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lazy-loading 'flavor' on Instance uuid 2aeb5a4c-785a-4238-8575-ecd1ff84b97c {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1795.456204] env[62508]: DEBUG nova.network.neutron [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Successfully updated port: ed5b1d50-d456-43d1-887a-96dcb4f42cec {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1795.478198] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': task-1776625, 'name': ReconfigVM_Task, 'duration_secs': 0.735982} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.478507] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 3e79a6d4-8639-478e-8753-71ff0e07496f/3e79a6d4-8639-478e-8753-71ff0e07496f.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1795.479199] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3c4c3e4-1ea2-41cd-8fb4-b966e63f3196 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.483192] env[62508]: DEBUG nova.compute.manager [req-c9843e4c-fa19-400e-b574-340a39bc575c req-f118322f-b57b-43fd-859d-4c50c8719a07 service nova] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Received event network-changed-ed5b1d50-d456-43d1-887a-96dcb4f42cec {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1795.483373] env[62508]: DEBUG nova.compute.manager [req-c9843e4c-fa19-400e-b574-340a39bc575c req-f118322f-b57b-43fd-859d-4c50c8719a07 service nova] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Refreshing instance network info cache due to event network-changed-ed5b1d50-d456-43d1-887a-96dcb4f42cec. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1795.483567] env[62508]: DEBUG oslo_concurrency.lockutils [req-c9843e4c-fa19-400e-b574-340a39bc575c req-f118322f-b57b-43fd-859d-4c50c8719a07 service nova] Acquiring lock "refresh_cache-d3455694-a157-404f-8153-a9f96bac49a2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1795.483670] env[62508]: DEBUG oslo_concurrency.lockutils [req-c9843e4c-fa19-400e-b574-340a39bc575c req-f118322f-b57b-43fd-859d-4c50c8719a07 service nova] Acquired lock "refresh_cache-d3455694-a157-404f-8153-a9f96bac49a2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1795.483833] env[62508]: DEBUG nova.network.neutron [req-c9843e4c-fa19-400e-b574-340a39bc575c req-f118322f-b57b-43fd-859d-4c50c8719a07 service nova] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Refreshing network info cache for port ed5b1d50-d456-43d1-887a-96dcb4f42cec {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1795.491372] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Waiting for the task: (returnval){ [ 1795.491372] env[62508]: value = "task-1776632" [ 1795.491372] env[62508]: _type = "Task" [ 1795.491372] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.504386] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': task-1776632, 'name': Rename_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.522919] env[62508]: ERROR nova.scheduler.client.report [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [req-18236182-4f61-48d3-99fe-7de7293b011d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-18236182-4f61-48d3-99fe-7de7293b011d"}]} [ 1795.544366] env[62508]: DEBUG nova.scheduler.client.report [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1795.561364] env[62508]: DEBUG nova.scheduler.client.report [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1795.561588] env[62508]: DEBUG nova.compute.provider_tree [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1795.573630] env[62508]: DEBUG nova.scheduler.client.report [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1795.595640] env[62508]: DEBUG nova.scheduler.client.report [None 
req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1795.601095] env[62508]: DEBUG nova.compute.manager [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1795.601713] env[62508]: DEBUG nova.virt.hardware [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1795.602041] env[62508]: DEBUG nova.virt.hardware [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1795.602300] env[62508]: DEBUG nova.virt.hardware [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1795.602592] env[62508]: DEBUG nova.virt.hardware [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1795.603560] env[62508]: DEBUG nova.virt.hardware [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1795.603560] env[62508]: DEBUG nova.virt.hardware [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1795.603560] env[62508]: DEBUG nova.virt.hardware [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 
tempest-ServerActionsTestOtherA-1355844615-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1795.603718] env[62508]: DEBUG nova.virt.hardware [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1795.603945] env[62508]: DEBUG nova.virt.hardware [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1795.604198] env[62508]: DEBUG nova.virt.hardware [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1795.604464] env[62508]: DEBUG nova.virt.hardware [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1795.605604] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9a6b82-ef23-43f1-9476-33ee43b6d587 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.618844] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f74ef63-f481-4951-8173-1b47cb50a17c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.697980] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776628, 'name': Rename_Task, 'duration_secs': 0.158369} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.701042] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1795.701274] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2b75f6fa-af3e-4a13-8dc6-bb97dec441fb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.709302] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1795.709302] env[62508]: value = "task-1776633" [ 1795.709302] env[62508]: _type = "Task" [ 1795.709302] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.723546] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776633, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.731593] env[62508]: DEBUG oslo_concurrency.lockutils [None req-001e7dda-8a1e-44fe-aa55-6442f2c7d03f tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "a15f3cef-c260-4a54-83af-7cccf81e15a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.046s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.753590] env[62508]: DEBUG oslo_vmware.api [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776629, 'name': PowerOffVM_Task, 'duration_secs': 0.331815} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.753900] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1795.754116] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1795.754464] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3e23629-e49c-4f40-a563-2328f8bb6c0e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.794578] env[62508]: DEBUG oslo_vmware.api [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776630, 'name': PowerOffVM_Task, 'duration_secs': 0.207089} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.795110] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1795.795110] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1795.795343] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1562fbb6-b658-4009-b59c-e05e00aef10a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.822903] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b201a0ac-a25b-4e98-9986-17d8cd08f3ca {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.832395] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eddda3d2-351b-47ea-bd3a-00829489e15e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.869766] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee12e1ae-0817-4c31-9114-a7c96ffb32d7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.874030] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Unregistered the 
VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1795.874030] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1795.874030] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Deleting the datastore file [datastore1] b83dd148-8cf6-474b-bb19-e0822732b12a {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1795.874592] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eeb9d2e7-d4c5-4532-bed1-ea0889d6740a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.876447] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1795.876698] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1795.876844] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Deleting the datastore file [datastore1] 24091abb-f71f-4528-8fc5-b97725cf079e {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1795.877563] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dfc52a25-564e-4201-b514-c80f82613844 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.887312] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f834a6-caff-4645-95b4-96ecf96318c2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.893324] env[62508]: DEBUG oslo_vmware.api [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for the task: (returnval){ [ 1795.893324] env[62508]: value = "task-1776636" [ 1795.893324] env[62508]: _type = "Task" [ 1795.893324] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.897844] env[62508]: DEBUG oslo_vmware.api [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1795.897844] env[62508]: value = "task-1776637" [ 1795.897844] env[62508]: _type = "Task" [ 1795.897844] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.910784] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776631, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.911313] env[62508]: DEBUG nova.compute.provider_tree [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1795.919082] env[62508]: DEBUG oslo_vmware.api [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776636, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.925965] env[62508]: DEBUG oslo_vmware.api [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776637, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.929632] env[62508]: DEBUG oslo_concurrency.lockutils [None req-555fc622-949e-484e-a98b-fa64474dbad6 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.325s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.960173] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "refresh_cache-d3455694-a157-404f-8153-a9f96bac49a2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1796.003806] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': task-1776632, 'name': Rename_Task, 'duration_secs': 0.17372} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.004495] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1796.004495] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58c74b90-c6ed-4be5-8bbb-868b9cf9af3f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.012793] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Waiting for the task: (returnval){ [ 1796.012793] env[62508]: value = "task-1776638" [ 1796.012793] env[62508]: _type = "Task" [ 1796.012793] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.023213] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': task-1776638, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.024213] env[62508]: DEBUG nova.network.neutron [req-c9843e4c-fa19-400e-b574-340a39bc575c req-f118322f-b57b-43fd-859d-4c50c8719a07 service nova] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1796.106397] env[62508]: DEBUG nova.network.neutron [req-c9843e4c-fa19-400e-b574-340a39bc575c req-f118322f-b57b-43fd-859d-4c50c8719a07 service nova] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1796.220037] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776633, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.389995] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776631, 'name': ReconfigVM_Task, 'duration_secs': 0.577147} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.390320] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 544d165c-5054-4c57-a5d9-ac69046c6fbc/544d165c-5054-4c57-a5d9-ac69046c6fbc.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1796.390963] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-42b0bdd9-169e-4055-9c7a-96a2fec52302 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.398679] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1796.398679] env[62508]: value = "task-1776639" [ 1796.398679] env[62508]: _type = "Task" [ 1796.398679] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.401899] env[62508]: DEBUG oslo_vmware.api [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Task: {'id': task-1776636, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.217469} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.404786] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1796.404983] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1796.405176] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1796.405348] env[62508]: INFO nova.compute.manager [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1796.405589] env[62508]: DEBUG oslo.service.loopingcall [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1796.406127] env[62508]: DEBUG nova.compute.manager [-] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1796.406231] env[62508]: DEBUG nova.network.neutron [-] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1796.414301] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776639, 'name': Rename_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.425787] env[62508]: DEBUG oslo_vmware.api [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776637, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.219688} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.426318] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1796.426512] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1796.426708] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1796.426921] env[62508]: INFO nova.compute.manager [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1796.427204] env[62508]: DEBUG oslo.service.loopingcall [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1796.427422] env[62508]: DEBUG nova.compute.manager [-] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1796.427565] env[62508]: DEBUG nova.network.neutron [-] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1796.440884] env[62508]: ERROR nova.scheduler.client.report [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] [req-3ff3cf95-e122-4436-9858-31967f2592b0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3ff3cf95-e122-4436-9858-31967f2592b0"}]} [ 1796.457241] env[62508]: DEBUG nova.scheduler.client.report [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1796.472088] env[62508]: DEBUG nova.scheduler.client.report [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1796.472327] env[62508]: DEBUG nova.compute.provider_tree [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1796.484088] env[62508]: DEBUG nova.scheduler.client.report [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1796.503416] env[62508]: DEBUG nova.scheduler.client.report [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1796.525750] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': task-1776638, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.609054] env[62508]: DEBUG oslo_concurrency.lockutils [req-c9843e4c-fa19-400e-b574-340a39bc575c req-f118322f-b57b-43fd-859d-4c50c8719a07 service nova] Releasing lock "refresh_cache-d3455694-a157-404f-8153-a9f96bac49a2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1796.612464] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired lock "refresh_cache-d3455694-a157-404f-8153-a9f96bac49a2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1796.612589] env[62508]: DEBUG nova.network.neutron [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1796.721868] env[62508]: DEBUG oslo_vmware.api [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776633, 'name': PowerOnVM_Task, 'duration_secs': 0.546689} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.722259] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1796.722414] env[62508]: INFO nova.compute.manager [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Took 18.97 seconds to spawn the instance on the hypervisor. 
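The 409 from placement a few records above (code "placement.concurrent_update") is a generation conflict: the resource provider's generation changed between the scheduler client's read and its PUT, so the client refreshes the inventory and generation and retries, which is what the following "Refreshing inventories" / "Updating ProviderTree inventory" records show. A minimal sketch of that compare-and-swap retry against the placement REST API follows; the endpoint URL, token, and microversion value are placeholders, not taken from this log, and the function is an illustration of the pattern rather than Nova's report client.

    import requests

    PLACEMENT = "http://placement.example/placement"        # placeholder endpoint
    HEADERS = {"x-auth-token": "ADMIN_TOKEN",                # placeholder token
               "OpenStack-API-Version": "placement 1.26"}    # placeholder microversion

    def put_inventory_with_retry(rp_uuid, inventories, attempts=5):
        """PUT inventories, retrying when placement reports a generation conflict."""
        for _ in range(attempts):
            # Re-read the provider to pick up its current generation.
            rp = requests.get(f"{PLACEMENT}/resource_providers/{rp_uuid}",
                              headers=HEADERS).json()
            body = {"resource_provider_generation": rp["generation"],
                    "inventories": inventories}
            resp = requests.put(f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories",
                                headers=HEADERS, json=body)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the
            # generation; loop to re-read and try again.
        raise RuntimeError("could not update inventory: repeated generation conflicts")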
[ 1796.722557] env[62508]: DEBUG nova.compute.manager [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1796.723447] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c85fc9b1-ebb4-445d-ab69-a095b9a47273 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.739016] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd44697-dac5-4f27-a7be-94c22df69673 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.749423] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-333cfac0-fc91-4256-9680-f12f1768d8e3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.784775] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "a15f3cef-c260-4a54-83af-7cccf81e15a6" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1796.784998] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "a15f3cef-c260-4a54-83af-7cccf81e15a6" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1796.785170] env[62508]: INFO nova.compute.manager [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Shelving [ 1796.790045] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248afe42-041e-40ef-8d1f-f36b2fbc6df6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.797191] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d47429eb-4a19-49b4-8ff3-f59e13309597 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.816679] env[62508]: DEBUG nova.compute.provider_tree [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1796.844505] env[62508]: DEBUG nova.compute.manager [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Stashing vm_state: active {{(pid=62508) 
_prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1796.916395] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776639, 'name': Rename_Task, 'duration_secs': 0.283626} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.916808] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1796.917186] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4848f4ac-002f-49e6-bd89-6ef07bab4933 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.927506] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1796.927506] env[62508]: value = "task-1776640" [ 1796.927506] env[62508]: _type = "Task" [ 1796.927506] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.940964] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776640, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.024141] env[62508]: DEBUG oslo_vmware.api [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': task-1776638, 'name': PowerOnVM_Task, 'duration_secs': 0.669001} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.024430] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1797.024643] env[62508]: INFO nova.compute.manager [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Took 11.62 seconds to spawn the instance on the hypervisor. 
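The Rename_Task / PowerOnVM_Task / DeleteDatastoreFile_Task records in this section all follow the same shape: the vSphere API returns a task reference, the caller polls it while logging "progress is N%", and finally records duration_secs once the task succeeds. A generic version of that polling loop is sketched below; poll_task() and its returned fields (state, progress, error) are assumptions supplied by the caller, and this is an illustration of the pattern, not oslo.vmware's actual wait_for_task implementation.

    import time

    def wait_for_task(poll_task, task_ref, interval=0.5, timeout=300.0):
        """Poll a vCenter-style task until it succeeds, mirroring the
        'progress is N%' / 'duration_secs' records in the surrounding log.

        poll_task(task_ref) is assumed to return an object exposing
        .state ('running' / 'success' / 'error'), .progress and .error."""
        start = time.monotonic()
        while True:
            info = poll_task(task_ref)
            if info.state == "success":
                return time.monotonic() - start          # duration_secs
            if info.state == "error":
                raise RuntimeError(f"task {task_ref} failed: {info.error}")
            if time.monotonic() - start > timeout:
                raise TimeoutError(f"task {task_ref} did not finish in {timeout}s")
            print(f"Task {task_ref} progress is {info.progress}%.")
            time.sleep(interval)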
[ 1797.024828] env[62508]: DEBUG nova.compute.manager [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1797.025785] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d53aab-d179-4972-84fb-e1e0627fbaae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.148221] env[62508]: DEBUG nova.network.neutron [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1797.246931] env[62508]: INFO nova.compute.manager [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Took 38.17 seconds to build instance. [ 1797.274714] env[62508]: DEBUG nova.compute.manager [req-d7d659ee-bf26-4d09-a387-daf84c1daca9 req-84884338-96e7-4156-be7b-e4c2a29df6a9 service nova] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Received event network-vif-deleted-0eda6157-2402-4297-8eb5-07a5b94eba56 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1797.274939] env[62508]: INFO nova.compute.manager [req-d7d659ee-bf26-4d09-a387-daf84c1daca9 req-84884338-96e7-4156-be7b-e4c2a29df6a9 service nova] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Neutron deleted interface 0eda6157-2402-4297-8eb5-07a5b94eba56; detaching it from the instance and deleting it from the info cache [ 1797.275132] env[62508]: DEBUG nova.network.neutron [req-d7d659ee-bf26-4d09-a387-daf84c1daca9 req-84884338-96e7-4156-be7b-e4c2a29df6a9 service nova] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1797.292715] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1797.293017] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff4e22c0-0a8f-48ff-8848-c86e05dd1638 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.301347] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1797.301347] env[62508]: value = "task-1776641" [ 1797.301347] env[62508]: _type = "Task" [ 1797.301347] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.312806] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776641, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.318783] env[62508]: DEBUG nova.scheduler.client.report [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1797.345359] env[62508]: DEBUG nova.network.neutron [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Updating instance_info_cache with network_info: [{"id": "ed5b1d50-d456-43d1-887a-96dcb4f42cec", "address": "fa:16:3e:68:ac:c2", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped5b1d50-d4", "ovs_interfaceid": "ed5b1d50-d456-43d1-887a-96dcb4f42cec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1797.371367] env[62508]: DEBUG oslo_concurrency.lockutils [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1797.442095] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776640, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.458744] env[62508]: DEBUG nova.network.neutron [-] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1797.512897] env[62508]: DEBUG nova.compute.manager [req-9be2f032-8341-4e1d-897b-d4627054ee0c req-802c9320-8099-4bbe-8f11-f1d4e8db111e service nova] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Received event network-vif-deleted-f79ec10a-7a06-4ee2-8de0-4db1e03d23d1 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1797.544512] env[62508]: INFO nova.compute.manager [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Took 17.08 seconds to build instance. [ 1797.739885] env[62508]: DEBUG nova.network.neutron [-] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1797.753742] env[62508]: DEBUG oslo_concurrency.lockutils [None req-804458ec-3e02-43e1-92d2-b140e6372ce2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.687s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.777555] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-539ed8b8-4d3d-4155-994c-ed0793af5081 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.788812] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ee58ff-cd4a-4267-93a3-ecca66b903f3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.813145] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776641, 'name': PowerOffVM_Task, 'duration_secs': 0.224582} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.828564] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1797.829896] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.676s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.832046] env[62508]: DEBUG nova.compute.manager [req-d7d659ee-bf26-4d09-a387-daf84c1daca9 req-84884338-96e7-4156-be7b-e4c2a29df6a9 service nova] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Detach interface failed, port_id=0eda6157-2402-4297-8eb5-07a5b94eba56, reason: Instance 24091abb-f71f-4528-8fc5-b97725cf079e could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1797.833197] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2aceff4-8204-41fb-9121-8add864f7569 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.836038] env[62508]: DEBUG oslo_concurrency.lockutils [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.465s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.860028] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Releasing lock "refresh_cache-d3455694-a157-404f-8153-a9f96bac49a2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1797.860136] env[62508]: DEBUG nova.compute.manager [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Instance network_info: |[{"id": "ed5b1d50-d456-43d1-887a-96dcb4f42cec", "address": "fa:16:3e:68:ac:c2", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": 
"nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped5b1d50-d4", "ovs_interfaceid": "ed5b1d50-d456-43d1-887a-96dcb4f42cec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1797.862105] env[62508]: INFO nova.scheduler.client.report [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Deleted allocations for instance 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a [ 1797.863215] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:ac:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '304be4f7-4e36-4468-9ef4-e457341cef18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ed5b1d50-d456-43d1-887a-96dcb4f42cec', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1797.870984] env[62508]: DEBUG oslo.service.loopingcall [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1797.871849] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2322127e-87b4-4155-a1a4-7e35a346eb35 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.878598] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1797.878598] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53769494-8d8e-4993-91e4-4410b1d176aa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.902156] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1797.902156] env[62508]: value = "task-1776642" [ 1797.902156] env[62508]: _type = "Task" [ 1797.902156] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.911203] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776642, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.940871] env[62508]: DEBUG oslo_vmware.api [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776640, 'name': PowerOnVM_Task, 'duration_secs': 0.693126} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.941343] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1797.941446] env[62508]: INFO nova.compute.manager [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Took 10.23 seconds to spawn the instance on the hypervisor. [ 1797.941630] env[62508]: DEBUG nova.compute.manager [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1797.942536] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a73162-1c63-42da-84df-d9e72a1976f2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.961252] env[62508]: INFO nova.compute.manager [-] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Took 1.55 seconds to deallocate network for instance. [ 1798.046538] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c51341af-6d4f-41b0-b9d2-bc8388a55310 tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Lock "3e79a6d4-8639-478e-8753-71ff0e07496f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.596s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.242789] env[62508]: INFO nova.compute.manager [-] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Took 1.82 seconds to deallocate network for instance. 
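The two long records above for instance d3455694-a157-404f-8153-a9f96bac49a2 show the same Neutron port twice: first as the raw network_info cache entry, then flattened into the "Instance VIF info" the VMware driver feeds into the VM spec (bridge name, MAC, the NSX logical switch as an OpaqueNetwork ref, and the vmxnet3 model). A sketch of that flattening for just the fields visible in the log; the helper name and the hard-coded vmxnet3 default are assumptions, not Nova's exact code.

    def vif_info_from_network_info(network_info, vif_model="vmxnet3"):
        """Flatten Neutron network_info entries (as logged above) into the
        per-VIF dicts attached to the VMware VM spec."""
        vifs = []
        for vif in network_info:
            switch_id = vif["details"].get("nsx-logical-switch-id")
            vifs.append({
                "network_name": vif["network"]["bridge"],    # e.g. 'br-int'
                "mac_address": vif["address"],
                "network_ref": {
                    "type": "OpaqueNetwork",
                    "network-id": switch_id,
                    "network-type": "nsx.LogicalSwitch",
                    "use-external-id": True,
                },
                "iface_id": vif["id"],
                "vif_model": vif_model,
            })
        return vifs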
[ 1798.342290] env[62508]: INFO nova.compute.claims [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1798.398205] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ce5370f3-ae2a-4d0d-ba6b-9b4a3e3c2148 tempest-ServerRescueNegativeTestJSON-1548107976 tempest-ServerRescueNegativeTestJSON-1548107976-project-member] Lock "7015b188-17ca-45ec-8fe8-f80ef0f9cb0a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.713s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.408972] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Creating Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1798.409294] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a82c604d-beac-4e81-bf06-65329b97e507 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.417873] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776642, 'name': CreateVM_Task, 'duration_secs': 0.513133} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.419635] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1798.420038] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1798.420038] env[62508]: value = "task-1776643" [ 1798.420038] env[62508]: _type = "Task" [ 1798.420038] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.420703] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'device_type': None, 'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368806', 'volume_id': '3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2', 'name': 'volume-3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3455694-a157-404f-8153-a9f96bac49a2', 'attached_at': '', 'detached_at': '', 'volume_id': '3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2', 'serial': '3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2'}, 'disk_bus': None, 'guest_format': None, 'attachment_id': '6ecda9fb-bfe1-48cf-a5b1-a0439cefcacb', 'mount_device': '/dev/sda', 'volume_type': None}], 'swap': None} {{(pid=62508) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1798.420916] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Root volume attach. Driver type: vmdk {{(pid=62508) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1798.422015] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bab7ade-966c-45b2-9cfd-0a70a432bf4e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.436715] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d296286f-8281-4c20-9b64-df14f0866a51 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.439381] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776643, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.444898] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d836054-502f-46f9-89e0-067b92b24823 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.451525] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-39723f83-07c6-487e-bfee-ccb43e054390 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.462830] env[62508]: DEBUG oslo_vmware.api [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1798.462830] env[62508]: value = "task-1776644" [ 1798.462830] env[62508]: _type = "Task" [ 1798.462830] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.463762] env[62508]: INFO nova.compute.manager [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Took 17.07 seconds to build instance. [ 1798.469156] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.476236] env[62508]: DEBUG oslo_vmware.api [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776644, 'name': RelocateVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.749525] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.849390] env[62508]: INFO nova.compute.resource_tracker [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updating resource usage from migration 175bc17b-cf18-4307-8ec2-2cf47b9564d9 [ 1798.933238] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776643, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.972825] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a6524270-4b59-476f-a08c-13c7dca38029 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "544d165c-5054-4c57-a5d9-ac69046c6fbc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.601s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.976914] env[62508]: DEBUG oslo_concurrency.lockutils [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "aa7c5176-4420-44b1-9fea-6db7561492c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.977220] env[62508]: DEBUG oslo_concurrency.lockutils [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "aa7c5176-4420-44b1-9fea-6db7561492c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.986328] env[62508]: DEBUG oslo_vmware.api [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776644, 'name': RelocateVM_Task, 'duration_secs': 0.453332} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.987104] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Volume attach. 
Driver type: vmdk {{(pid=62508) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1798.987324] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368806', 'volume_id': '3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2', 'name': 'volume-3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3455694-a157-404f-8153-a9f96bac49a2', 'attached_at': '', 'detached_at': '', 'volume_id': '3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2', 'serial': '3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1798.988278] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7ed2bd-7d16-4930-b353-d16f7f512630 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.011966] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807cf202-4f39-46a5-bade-b47d2b21f640 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.039070] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] volume-3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2/volume-3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1799.042259] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d134fd2f-7e06-44a2-8841-a74556ad20c0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.064770] env[62508]: DEBUG oslo_vmware.api [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1799.064770] env[62508]: value = "task-1776645" [ 1799.064770] env[62508]: _type = "Task" [ 1799.064770] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.075980] env[62508]: DEBUG oslo_vmware.api [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776645, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.096768] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c9e17b-a6ae-4553-baf6-1dad2073995c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.105935] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56174c63-cae0-43c3-b93b-d9f21763fa65 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.141451] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d55168-7bf3-49da-84cf-db5313d435f0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.150912] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e4411b-6857-4f29-bae3-109e60a7d0b0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.170046] env[62508]: DEBUG nova.compute.provider_tree [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1799.314513] env[62508]: DEBUG nova.compute.manager [req-eab4b6f5-243a-47d4-aef0-1c66dcd4e017 req-6b83bffa-1ec8-4676-9464-92cfea689f2d service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Received event network-changed-ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1799.314760] env[62508]: DEBUG nova.compute.manager [req-eab4b6f5-243a-47d4-aef0-1c66dcd4e017 req-6b83bffa-1ec8-4676-9464-92cfea689f2d service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Refreshing instance network info cache due to event network-changed-ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1799.315091] env[62508]: DEBUG oslo_concurrency.lockutils [req-eab4b6f5-243a-47d4-aef0-1c66dcd4e017 req-6b83bffa-1ec8-4676-9464-92cfea689f2d service nova] Acquiring lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1799.315252] env[62508]: DEBUG oslo_concurrency.lockutils [req-eab4b6f5-243a-47d4-aef0-1c66dcd4e017 req-6b83bffa-1ec8-4676-9464-92cfea689f2d service nova] Acquired lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1799.315509] env[62508]: DEBUG nova.network.neutron [req-eab4b6f5-243a-47d4-aef0-1c66dcd4e017 req-6b83bffa-1ec8-4676-9464-92cfea689f2d service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Refreshing network info cache for port ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1799.434161] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776643, 'name': CreateSnapshot_Task, 'duration_secs': 0.583916} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.434161] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Created Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1799.435159] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6acc329b-7128-4258-acdb-96d7c99cbb45 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.487448] env[62508]: DEBUG nova.compute.manager [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1799.581172] env[62508]: DEBUG oslo_vmware.api [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776645, 'name': ReconfigVM_Task, 'duration_secs': 0.507353} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.581172] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Reconfigured VM instance instance-00000065 to attach disk [datastore1] volume-3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2/volume-3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1799.586876] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c70988c9-99ef-434c-8c41-834f47110669 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.607234] env[62508]: DEBUG oslo_vmware.api [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1799.607234] env[62508]: value = "task-1776646" [ 1799.607234] env[62508]: _type = "Task" [ 1799.607234] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.616061] env[62508]: DEBUG oslo_vmware.api [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776646, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.647419] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Acquiring lock "3e79a6d4-8639-478e-8753-71ff0e07496f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1799.647419] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Lock "3e79a6d4-8639-478e-8753-71ff0e07496f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.647419] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Acquiring lock "3e79a6d4-8639-478e-8753-71ff0e07496f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1799.647419] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Lock "3e79a6d4-8639-478e-8753-71ff0e07496f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.647419] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Lock "3e79a6d4-8639-478e-8753-71ff0e07496f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.648195] env[62508]: INFO nova.compute.manager [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Terminating instance [ 1799.650797] env[62508]: DEBUG nova.compute.manager [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1799.651198] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1799.652201] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669cc235-8221-48c7-b480-037a078ee3f9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.660458] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1799.660938] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-faea90ca-71bb-43eb-bae9-a20bf8eff773 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.668313] env[62508]: DEBUG oslo_vmware.api [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Waiting for the task: (returnval){ [ 1799.668313] env[62508]: value = "task-1776647" [ 1799.668313] env[62508]: _type = "Task" [ 1799.668313] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.672882] env[62508]: DEBUG nova.scheduler.client.report [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1799.685192] env[62508]: DEBUG oslo_vmware.api [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': task-1776647, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.961960] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Creating linked-clone VM from snapshot {{(pid=62508) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1799.967375] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9956d0f0-c0a1-448d-a116-294fbf1b0944 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.981344] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1799.981344] env[62508]: value = "task-1776648" [ 1799.981344] env[62508]: _type = "Task" [ 1799.981344] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.994904] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776648, 'name': CloneVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.014666] env[62508]: DEBUG oslo_concurrency.lockutils [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.091471] env[62508]: DEBUG nova.network.neutron [req-eab4b6f5-243a-47d4-aef0-1c66dcd4e017 req-6b83bffa-1ec8-4676-9464-92cfea689f2d service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Updated VIF entry in instance network info cache for port ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1800.091471] env[62508]: DEBUG nova.network.neutron [req-eab4b6f5-243a-47d4-aef0-1c66dcd4e017 req-6b83bffa-1ec8-4676-9464-92cfea689f2d service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Updating instance_info_cache with network_info: [{"id": "ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4", "address": "fa:16:3e:8b:f5:37", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea63968d-e3", "ovs_interfaceid": "ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1800.118542] env[62508]: DEBUG oslo_vmware.api [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776646, 'name': ReconfigVM_Task, 'duration_secs': 0.301608} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.118852] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368806', 'volume_id': '3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2', 'name': 'volume-3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3455694-a157-404f-8153-a9f96bac49a2', 'attached_at': '', 'detached_at': '', 'volume_id': '3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2', 'serial': '3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1800.119716] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c439d592-3549-4600-8c44-c00e7a3e43ee {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.129255] env[62508]: DEBUG oslo_vmware.api [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1800.129255] env[62508]: value = "task-1776649" [ 1800.129255] env[62508]: _type = "Task" [ 1800.129255] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.141291] env[62508]: DEBUG oslo_vmware.api [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776649, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.181915] env[62508]: DEBUG oslo_concurrency.lockutils [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.346s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.182448] env[62508]: INFO nova.compute.manager [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Migrating [ 1800.191559] env[62508]: DEBUG oslo_vmware.api [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': task-1776647, 'name': PowerOffVM_Task, 'duration_secs': 0.33887} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.194951] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.726s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.195343] env[62508]: DEBUG nova.objects.instance [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lazy-loading 'resources' on Instance uuid b83dd148-8cf6-474b-bb19-e0822732b12a {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1800.196438] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1800.196639] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1800.201919] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22d50507-59b5-4729-97fe-f30812b43e8e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.494035] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776648, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.594048] env[62508]: DEBUG oslo_concurrency.lockutils [req-eab4b6f5-243a-47d4-aef0-1c66dcd4e017 req-6b83bffa-1ec8-4676-9464-92cfea689f2d service nova] Releasing lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1800.642437] env[62508]: DEBUG oslo_vmware.api [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776649, 'name': Rename_Task, 'duration_secs': 0.182235} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.645170] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1800.645170] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3cd5803e-42ea-4512-9bb0-7e334567c12e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.651176] env[62508]: DEBUG oslo_vmware.api [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1800.651176] env[62508]: value = "task-1776651" [ 1800.651176] env[62508]: _type = "Task" [ 1800.651176] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.659967] env[62508]: DEBUG oslo_vmware.api [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776651, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.707030] env[62508]: DEBUG oslo_concurrency.lockutils [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1800.707118] env[62508]: DEBUG oslo_concurrency.lockutils [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1800.707496] env[62508]: DEBUG nova.network.neutron [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1800.906909] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95bda470-5b27-4859-84c4-b33c6d38a24d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.915274] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2032d812-01ca-4a53-bf3b-9d3fad9d04d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.948082] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39ba50a-95bf-4629-bc5d-58c47b890fa5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.956962] env[62508]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d7ad43d-d918-4809-ac7d-a4e0a664d91f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.974359] env[62508]: DEBUG nova.compute.provider_tree [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1800.994370] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776648, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.164903] env[62508]: DEBUG oslo_vmware.api [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776651, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.210847] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1801.211153] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1801.211343] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Deleting the datastore file [datastore1] 3e79a6d4-8639-478e-8753-71ff0e07496f {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1801.211626] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d32561d8-e01a-466c-8260-692f19f03c2c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.221382] env[62508]: DEBUG oslo_vmware.api [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Waiting for the task: (returnval){ [ 1801.221382] env[62508]: value = "task-1776652" [ 1801.221382] env[62508]: _type = "Task" [ 1801.221382] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.229386] env[62508]: DEBUG oslo_vmware.api [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': task-1776652, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.348892] env[62508]: DEBUG nova.compute.manager [req-bf5a30f6-6306-4513-9484-4df2a3e0a1c7 req-c4f79c82-4a17-403c-89f7-6c191765b7e7 service nova] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Received event network-changed-e0bb9906-0d2a-4bdb-bbe4-5a3074c66499 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1801.349026] env[62508]: DEBUG nova.compute.manager [req-bf5a30f6-6306-4513-9484-4df2a3e0a1c7 req-c4f79c82-4a17-403c-89f7-6c191765b7e7 service nova] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Refreshing instance network info cache due to event network-changed-e0bb9906-0d2a-4bdb-bbe4-5a3074c66499. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1801.349286] env[62508]: DEBUG oslo_concurrency.lockutils [req-bf5a30f6-6306-4513-9484-4df2a3e0a1c7 req-c4f79c82-4a17-403c-89f7-6c191765b7e7 service nova] Acquiring lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1801.349471] env[62508]: DEBUG oslo_concurrency.lockutils [req-bf5a30f6-6306-4513-9484-4df2a3e0a1c7 req-c4f79c82-4a17-403c-89f7-6c191765b7e7 service nova] Acquired lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1801.349672] env[62508]: DEBUG nova.network.neutron [req-bf5a30f6-6306-4513-9484-4df2a3e0a1c7 req-c4f79c82-4a17-403c-89f7-6c191765b7e7 service nova] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Refreshing network info cache for port e0bb9906-0d2a-4bdb-bbe4-5a3074c66499 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1801.477929] env[62508]: DEBUG nova.scheduler.client.report [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1801.492666] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776648, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.523841] env[62508]: DEBUG nova.network.neutron [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updating instance_info_cache with network_info: [{"id": "87ce4777-2520-4432-a1ed-03e189684761", "address": "fa:16:3e:8b:d2:e9", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87ce4777-25", "ovs_interfaceid": "87ce4777-2520-4432-a1ed-03e189684761", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1801.662530] env[62508]: DEBUG oslo_vmware.api [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776651, 'name': PowerOnVM_Task, 'duration_secs': 0.531945} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.662847] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1801.663075] env[62508]: INFO nova.compute.manager [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Took 6.06 seconds to spawn the instance on the hypervisor. 
[ 1801.663257] env[62508]: DEBUG nova.compute.manager [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1801.664083] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e006e30-f16c-4500-8e26-6968e7d0789d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.731683] env[62508]: DEBUG oslo_vmware.api [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Task: {'id': task-1776652, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.221212} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.731990] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1801.732222] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1801.732412] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1801.732589] env[62508]: INFO nova.compute.manager [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Took 2.08 seconds to destroy the instance on the hypervisor. [ 1801.732832] env[62508]: DEBUG oslo.service.loopingcall [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1801.733044] env[62508]: DEBUG nova.compute.manager [-] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1801.733142] env[62508]: DEBUG nova.network.neutron [-] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1801.983469] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.788s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.985758] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.236s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.986012] env[62508]: DEBUG nova.objects.instance [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lazy-loading 'resources' on Instance uuid 24091abb-f71f-4528-8fc5-b97725cf079e {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1801.997697] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776648, 'name': CloneVM_Task} progress is 95%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.016023] env[62508]: INFO nova.scheduler.client.report [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Deleted allocations for instance b83dd148-8cf6-474b-bb19-e0822732b12a [ 1802.026964] env[62508]: DEBUG oslo_concurrency.lockutils [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1802.184332] env[62508]: INFO nova.compute.manager [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Took 12.28 seconds to build instance. 
[ 1802.223882] env[62508]: DEBUG nova.compute.manager [req-a80a93db-6a17-492c-a40e-e0befeef41b1 req-060d4082-6700-4f3e-baee-aeef8a0df3b8 service nova] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Received event network-vif-deleted-cf175192-2f32-4a16-aa1c-26be6500c839 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1802.224176] env[62508]: INFO nova.compute.manager [req-a80a93db-6a17-492c-a40e-e0befeef41b1 req-060d4082-6700-4f3e-baee-aeef8a0df3b8 service nova] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Neutron deleted interface cf175192-2f32-4a16-aa1c-26be6500c839; detaching it from the instance and deleting it from the info cache [ 1802.224329] env[62508]: DEBUG nova.network.neutron [req-a80a93db-6a17-492c-a40e-e0befeef41b1 req-060d4082-6700-4f3e-baee-aeef8a0df3b8 service nova] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.226826] env[62508]: DEBUG nova.network.neutron [req-bf5a30f6-6306-4513-9484-4df2a3e0a1c7 req-c4f79c82-4a17-403c-89f7-6c191765b7e7 service nova] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updated VIF entry in instance network info cache for port e0bb9906-0d2a-4bdb-bbe4-5a3074c66499. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1802.227192] env[62508]: DEBUG nova.network.neutron [req-bf5a30f6-6306-4513-9484-4df2a3e0a1c7 req-c4f79c82-4a17-403c-89f7-6c191765b7e7 service nova] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updating instance_info_cache with network_info: [{"id": "e0bb9906-0d2a-4bdb-bbe4-5a3074c66499", "address": "fa:16:3e:50:39:a4", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0bb9906-0d", "ovs_interfaceid": "e0bb9906-0d2a-4bdb-bbe4-5a3074c66499", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.505364] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776648, 'name': CloneVM_Task, 'duration_secs': 2.193434} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.506085] env[62508]: INFO nova.virt.vmwareapi.vmops [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Created linked-clone VM from snapshot [ 1802.509112] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f0707f-fc1f-4d27-bb0f-93ef08e32d00 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.518177] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Uploading image 5bb3b02a-ae1d-4ff5-982b-1495a1fb26fe {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1802.533194] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e0089b63-8159-4df8-8826-28ce6f0d0e4c tempest-ListServerFiltersTestJSON-67719888 tempest-ListServerFiltersTestJSON-67719888-project-member] Lock "b83dd148-8cf6-474b-bb19-e0822732b12a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.325s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1802.565737] env[62508]: DEBUG oslo_vmware.rw_handles [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1802.565737] env[62508]: value = "vm-368820" [ 1802.565737] env[62508]: _type = "VirtualMachine" [ 1802.565737] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1802.566041] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-8b054876-7daf-416d-b1ba-e20e5252b6b6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.575991] env[62508]: DEBUG oslo_vmware.rw_handles [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lease: (returnval){ [ 1802.575991] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522fe905-94b2-33b5-9e16-9f299013aaf1" [ 1802.575991] env[62508]: _type = "HttpNfcLease" [ 1802.575991] env[62508]: } obtained for exporting VM: (result){ [ 1802.575991] env[62508]: value = "vm-368820" [ 1802.575991] env[62508]: _type = "VirtualMachine" [ 1802.575991] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1802.575991] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the lease: (returnval){ [ 1802.575991] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522fe905-94b2-33b5-9e16-9f299013aaf1" [ 1802.575991] env[62508]: _type = "HttpNfcLease" [ 1802.575991] env[62508]: } to be ready. 
{{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1802.588709] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1802.588709] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522fe905-94b2-33b5-9e16-9f299013aaf1" [ 1802.588709] env[62508]: _type = "HttpNfcLease" [ 1802.588709] env[62508]: } is initializing. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1802.687276] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fcc41fe9-f0b2-4403-8d76-b7660afd2019 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "d3455694-a157-404f-8153-a9f96bac49a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.792s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1802.703220] env[62508]: DEBUG nova.network.neutron [-] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.729441] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-851abc3d-a15b-4f94-91a5-01229d7e40a0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.732685] env[62508]: DEBUG oslo_concurrency.lockutils [req-bf5a30f6-6306-4513-9484-4df2a3e0a1c7 req-c4f79c82-4a17-403c-89f7-6c191765b7e7 service nova] Releasing lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1802.745009] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d772b2-05f2-4d8a-92d0-e98aae978f01 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.759767] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe92d357-db46-42d2-b2dc-ded08270ee2e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.773503] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11dea6b-e71b-4ffe-bb78-826a56fb7f1c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.818393] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6dc3afd-9054-4183-b222-cc80d448dbe3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.821226] env[62508]: DEBUG nova.compute.manager [req-a80a93db-6a17-492c-a40e-e0befeef41b1 req-060d4082-6700-4f3e-baee-aeef8a0df3b8 service nova] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Detach interface failed, port_id=cf175192-2f32-4a16-aa1c-26be6500c839, reason: Instance 3e79a6d4-8639-478e-8753-71ff0e07496f could not be found. 
{{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1802.827200] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8ae24f-98ae-4e6a-8a76-2cca50920011 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.841173] env[62508]: DEBUG nova.compute.provider_tree [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1803.091140] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1803.091140] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522fe905-94b2-33b5-9e16-9f299013aaf1" [ 1803.091140] env[62508]: _type = "HttpNfcLease" [ 1803.091140] env[62508]: } is ready. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1803.091436] env[62508]: DEBUG oslo_vmware.rw_handles [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1803.091436] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522fe905-94b2-33b5-9e16-9f299013aaf1" [ 1803.091436] env[62508]: _type = "HttpNfcLease" [ 1803.091436] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1803.092332] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ab7a28-2b43-4de9-8db0-e683f179ed81 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.101386] env[62508]: DEBUG oslo_vmware.rw_handles [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52306b8a-04b1-667a-9918-7c8bdd91c1a2/disk-0.vmdk from lease info. {{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1803.101636] env[62508]: DEBUG oslo_vmware.rw_handles [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52306b8a-04b1-667a-9918-7c8bdd91c1a2/disk-0.vmdk for reading. {{(pid=62508) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1803.205714] env[62508]: INFO nova.compute.manager [-] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Took 1.47 seconds to deallocate network for instance. 
[ 1803.213620] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-da18aefb-e9e4-4137-8b3c-960fca7dd133 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.344043] env[62508]: DEBUG nova.scheduler.client.report [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1803.433433] env[62508]: DEBUG nova.compute.manager [req-9c106dfe-8c1d-443a-a5b9-b1883eaef903 req-707ab660-6705-4115-ae1e-d3208ec3366f service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Received event network-changed-d8028a3e-f50d-41fa-b065-a2babc831eec {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1803.433530] env[62508]: DEBUG nova.compute.manager [req-9c106dfe-8c1d-443a-a5b9-b1883eaef903 req-707ab660-6705-4115-ae1e-d3208ec3366f service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Refreshing instance network info cache due to event network-changed-d8028a3e-f50d-41fa-b065-a2babc831eec. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1803.433720] env[62508]: DEBUG oslo_concurrency.lockutils [req-9c106dfe-8c1d-443a-a5b9-b1883eaef903 req-707ab660-6705-4115-ae1e-d3208ec3366f service nova] Acquiring lock "refresh_cache-de69dbf0-86f1-4b05-a9db-8b9afaabe49c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1803.433892] env[62508]: DEBUG oslo_concurrency.lockutils [req-9c106dfe-8c1d-443a-a5b9-b1883eaef903 req-707ab660-6705-4115-ae1e-d3208ec3366f service nova] Acquired lock "refresh_cache-de69dbf0-86f1-4b05-a9db-8b9afaabe49c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1803.434775] env[62508]: DEBUG nova.network.neutron [req-9c106dfe-8c1d-443a-a5b9-b1883eaef903 req-707ab660-6705-4115-ae1e-d3208ec3366f service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Refreshing network info cache for port d8028a3e-f50d-41fa-b065-a2babc831eec {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1803.547215] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131c2a35-cc04-42dc-af43-c60dd411ec5d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.572994] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updating instance '2aeb5a4c-785a-4238-8575-ecd1ff84b97c' progress to 0 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1803.717661] env[62508]: DEBUG oslo_concurrency.lockutils [None 
req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.850415] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.864s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.855430] env[62508]: DEBUG oslo_concurrency.lockutils [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.839s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.855430] env[62508]: INFO nova.compute.claims [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1803.881423] env[62508]: INFO nova.scheduler.client.report [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Deleted allocations for instance 24091abb-f71f-4528-8fc5-b97725cf079e [ 1804.080318] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1804.080596] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49567c88-0441-44f2-b78f-e2b95d229482 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.094861] env[62508]: DEBUG oslo_vmware.api [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1804.094861] env[62508]: value = "task-1776654" [ 1804.094861] env[62508]: _type = "Task" [ 1804.094861] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.106689] env[62508]: DEBUG oslo_vmware.api [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776654, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.183934] env[62508]: DEBUG nova.network.neutron [req-9c106dfe-8c1d-443a-a5b9-b1883eaef903 req-707ab660-6705-4115-ae1e-d3208ec3366f service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Updated VIF entry in instance network info cache for port d8028a3e-f50d-41fa-b065-a2babc831eec. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1804.184512] env[62508]: DEBUG nova.network.neutron [req-9c106dfe-8c1d-443a-a5b9-b1883eaef903 req-707ab660-6705-4115-ae1e-d3208ec3366f service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Updating instance_info_cache with network_info: [{"id": "d8028a3e-f50d-41fa-b065-a2babc831eec", "address": "fa:16:3e:d0:df:77", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8028a3e-f5", "ovs_interfaceid": "d8028a3e-f50d-41fa-b065-a2babc831eec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1804.393259] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d65bdf3d-9759-4b95-9383-9f057132d9de tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "24091abb-f71f-4528-8fc5-b97725cf079e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.135s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1804.607332] env[62508]: DEBUG oslo_vmware.api [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776654, 'name': PowerOffVM_Task, 'duration_secs': 0.291662} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.607642] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1804.607849] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updating instance '2aeb5a4c-785a-4238-8575-ecd1ff84b97c' progress to 17 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1804.690320] env[62508]: DEBUG oslo_concurrency.lockutils [req-9c106dfe-8c1d-443a-a5b9-b1883eaef903 req-707ab660-6705-4115-ae1e-d3208ec3366f service nova] Releasing lock "refresh_cache-de69dbf0-86f1-4b05-a9db-8b9afaabe49c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1804.690320] env[62508]: DEBUG nova.compute.manager [req-9c106dfe-8c1d-443a-a5b9-b1883eaef903 req-707ab660-6705-4115-ae1e-d3208ec3366f service nova] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Received event network-changed-ed5b1d50-d456-43d1-887a-96dcb4f42cec {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1804.690320] env[62508]: DEBUG nova.compute.manager [req-9c106dfe-8c1d-443a-a5b9-b1883eaef903 req-707ab660-6705-4115-ae1e-d3208ec3366f service nova] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Refreshing instance network info cache due to event network-changed-ed5b1d50-d456-43d1-887a-96dcb4f42cec. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1804.690320] env[62508]: DEBUG oslo_concurrency.lockutils [req-9c106dfe-8c1d-443a-a5b9-b1883eaef903 req-707ab660-6705-4115-ae1e-d3208ec3366f service nova] Acquiring lock "refresh_cache-d3455694-a157-404f-8153-a9f96bac49a2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1804.690320] env[62508]: DEBUG oslo_concurrency.lockutils [req-9c106dfe-8c1d-443a-a5b9-b1883eaef903 req-707ab660-6705-4115-ae1e-d3208ec3366f service nova] Acquired lock "refresh_cache-d3455694-a157-404f-8153-a9f96bac49a2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1804.690320] env[62508]: DEBUG nova.network.neutron [req-9c106dfe-8c1d-443a-a5b9-b1883eaef903 req-707ab660-6705-4115-ae1e-d3208ec3366f service nova] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Refreshing network info cache for port ed5b1d50-d456-43d1-887a-96dcb4f42cec {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1804.778189] env[62508]: DEBUG nova.compute.manager [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Stashing vm_state: active {{(pid=62508) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1805.052328] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090f8c1e-2a64-4863-9ed9-863b99109ff0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.060749] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b06835af-5d33-4e1c-a004-87a2aab4de6d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.093809] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de781fd4-84b3-4e68-b69d-989fe7fb04c0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.103159] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dff630e-87c6-419a-bf3e-aa752b4930c0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.119419] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1805.119666] env[62508]: DEBUG nova.virt.hardware [None 
req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1805.119828] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1805.120136] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1805.120315] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1805.120469] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1805.120675] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1805.121148] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1805.121398] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1805.121526] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1805.121714] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1805.128281] env[62508]: DEBUG nova.compute.provider_tree [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Inventory has not 
changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1805.129846] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d483ecb7-b83d-4a2f-b99b-c4f59830027a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.148316] env[62508]: DEBUG oslo_vmware.api [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1805.148316] env[62508]: value = "task-1776655" [ 1805.148316] env[62508]: _type = "Task" [ 1805.148316] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.160127] env[62508]: DEBUG oslo_vmware.api [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776655, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.297847] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1805.402435] env[62508]: DEBUG oslo_concurrency.lockutils [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "cd2424b1-3842-4df4-8636-23417833ea49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1805.402778] env[62508]: DEBUG oslo_concurrency.lockutils [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "cd2424b1-3842-4df4-8636-23417833ea49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.517564] env[62508]: DEBUG nova.network.neutron [req-9c106dfe-8c1d-443a-a5b9-b1883eaef903 req-707ab660-6705-4115-ae1e-d3208ec3366f service nova] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Updated VIF entry in instance network info cache for port ed5b1d50-d456-43d1-887a-96dcb4f42cec. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1805.518014] env[62508]: DEBUG nova.network.neutron [req-9c106dfe-8c1d-443a-a5b9-b1883eaef903 req-707ab660-6705-4115-ae1e-d3208ec3366f service nova] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Updating instance_info_cache with network_info: [{"id": "ed5b1d50-d456-43d1-887a-96dcb4f42cec", "address": "fa:16:3e:68:ac:c2", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped5b1d50-d4", "ovs_interfaceid": "ed5b1d50-d456-43d1-887a-96dcb4f42cec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1805.642275] env[62508]: DEBUG nova.scheduler.client.report [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1805.663999] env[62508]: DEBUG oslo_vmware.api [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776655, 'name': ReconfigVM_Task, 'duration_secs': 0.304855} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.664976] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updating instance '2aeb5a4c-785a-4238-8575-ecd1ff84b97c' progress to 33 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1805.905188] env[62508]: DEBUG nova.compute.manager [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1806.021693] env[62508]: DEBUG oslo_concurrency.lockutils [req-9c106dfe-8c1d-443a-a5b9-b1883eaef903 req-707ab660-6705-4115-ae1e-d3208ec3366f service nova] Releasing lock "refresh_cache-d3455694-a157-404f-8153-a9f96bac49a2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1806.147309] env[62508]: DEBUG oslo_concurrency.lockutils [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.294s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.148089] env[62508]: DEBUG nova.compute.manager [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1806.152088] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.435s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1806.152569] env[62508]: DEBUG nova.objects.instance [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Lazy-loading 'resources' on Instance uuid 3e79a6d4-8639-478e-8753-71ff0e07496f {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1806.171917] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1806.172319] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1806.172410] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1806.172586] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1806.172747] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1806.172899] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1806.173117] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1806.173285] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1806.173469] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1806.174447] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1806.174447] env[62508]: DEBUG nova.virt.hardware [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1806.179393] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Reconfiguring VM instance instance-0000005b to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1806.180158] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9ad7bd5-74b6-4ee2-b03a-4274d491b6e9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.202392] env[62508]: DEBUG oslo_vmware.api [None 
req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1806.202392] env[62508]: value = "task-1776656" [ 1806.202392] env[62508]: _type = "Task" [ 1806.202392] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.211936] env[62508]: DEBUG oslo_vmware.api [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776656, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.429193] env[62508]: DEBUG oslo_concurrency.lockutils [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1806.655729] env[62508]: DEBUG nova.compute.utils [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1806.660027] env[62508]: DEBUG nova.compute.manager [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1806.660235] env[62508]: DEBUG nova.network.neutron [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1806.706324] env[62508]: DEBUG nova.policy [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c1793957cc840d58a1b6f1f9b38b96b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b46df14344794f29a8b0c00408d18159', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1806.718335] env[62508]: DEBUG oslo_vmware.api [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776656, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.862064] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-173ed307-8c0a-4501-a0fb-edd57241be2b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.871544] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed0a415-484f-4172-8a78-a3b964b5e7b1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.906771] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f92ae6-9ef3-42d0-a746-04f86c226176 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.916341] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98964965-1a5b-4c85-b88a-033763586d20 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.930580] env[62508]: DEBUG nova.compute.provider_tree [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1807.105186] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1807.105464] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1807.161225] env[62508]: DEBUG nova.compute.manager [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1807.214521] env[62508]: DEBUG oslo_vmware.api [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776656, 'name': ReconfigVM_Task, 'duration_secs': 0.583762} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.215012] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Reconfigured VM instance instance-0000005b to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1807.216093] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27586c19-5eb0-403f-81e9-48c95a11624f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.242883] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 2aeb5a4c-785a-4238-8575-ecd1ff84b97c/2aeb5a4c-785a-4238-8575-ecd1ff84b97c.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1807.243215] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-874044ac-cbaa-4a69-91ee-b5932a9822b9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.256909] env[62508]: DEBUG nova.network.neutron [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Successfully created port: 0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1807.265238] env[62508]: DEBUG oslo_vmware.api [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1807.265238] env[62508]: value = "task-1776657" [ 1807.265238] env[62508]: _type = "Task" [ 1807.265238] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.276249] env[62508]: DEBUG oslo_vmware.api [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776657, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.436020] env[62508]: DEBUG nova.scheduler.client.report [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1807.608055] env[62508]: DEBUG nova.compute.manager [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1807.777325] env[62508]: DEBUG oslo_vmware.api [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776657, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.939378] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.787s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1807.941763] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.644s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1807.966257] env[62508]: INFO nova.scheduler.client.report [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Deleted allocations for instance 3e79a6d4-8639-478e-8753-71ff0e07496f [ 1808.130141] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.170239] env[62508]: DEBUG nova.compute.manager [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1808.201380] env[62508]: DEBUG nova.virt.hardware [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1808.202299] env[62508]: DEBUG nova.virt.hardware [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1808.202299] env[62508]: DEBUG nova.virt.hardware [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1808.202299] env[62508]: DEBUG nova.virt.hardware [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1808.202299] env[62508]: DEBUG nova.virt.hardware [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1808.202299] env[62508]: DEBUG nova.virt.hardware [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1808.202536] env[62508]: DEBUG nova.virt.hardware [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1808.202705] env[62508]: DEBUG nova.virt.hardware [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1808.203367] 
env[62508]: DEBUG nova.virt.hardware [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1808.203367] env[62508]: DEBUG nova.virt.hardware [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1808.203367] env[62508]: DEBUG nova.virt.hardware [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1808.204136] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd35ae7-b89d-4622-a1a4-c92a7779d7f9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.213625] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-252a7424-cd14-4a44-ac64-e05f4e2e2c9b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.276108] env[62508]: DEBUG oslo_vmware.api [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776657, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.447464] env[62508]: INFO nova.compute.claims [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1808.474486] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f061a1be-a983-4152-add9-f4fe4bc5aa9c tempest-ServerMetadataTestJSON-2033878923 tempest-ServerMetadataTestJSON-2033878923-project-member] Lock "3e79a6d4-8639-478e-8753-71ff0e07496f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.829s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.760597] env[62508]: DEBUG nova.compute.manager [req-96e420ce-9143-4c83-9eb3-7b812e94b734 req-249f875a-5f1e-4021-a615-5e2c6303ec0c service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Received event network-vif-plugged-0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1808.760935] env[62508]: DEBUG oslo_concurrency.lockutils [req-96e420ce-9143-4c83-9eb3-7b812e94b734 req-249f875a-5f1e-4021-a615-5e2c6303ec0c service nova] Acquiring lock "aa7c5176-4420-44b1-9fea-6db7561492c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.761233] env[62508]: DEBUG oslo_concurrency.lockutils [req-96e420ce-9143-4c83-9eb3-7b812e94b734 req-249f875a-5f1e-4021-a615-5e2c6303ec0c service nova] Lock "aa7c5176-4420-44b1-9fea-6db7561492c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.761418] env[62508]: DEBUG oslo_concurrency.lockutils [req-96e420ce-9143-4c83-9eb3-7b812e94b734 req-249f875a-5f1e-4021-a615-5e2c6303ec0c service nova] Lock "aa7c5176-4420-44b1-9fea-6db7561492c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.761624] env[62508]: DEBUG nova.compute.manager [req-96e420ce-9143-4c83-9eb3-7b812e94b734 req-249f875a-5f1e-4021-a615-5e2c6303ec0c service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] No waiting events found dispatching network-vif-plugged-0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1808.761798] env[62508]: WARNING nova.compute.manager [req-96e420ce-9143-4c83-9eb3-7b812e94b734 req-249f875a-5f1e-4021-a615-5e2c6303ec0c service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Received unexpected event network-vif-plugged-0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3 for instance with vm_state building and task_state spawning. [ 1808.780024] env[62508]: DEBUG oslo_vmware.api [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776657, 'name': ReconfigVM_Task, 'duration_secs': 1.13357} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.780024] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 2aeb5a4c-785a-4238-8575-ecd1ff84b97c/2aeb5a4c-785a-4238-8575-ecd1ff84b97c.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1808.780024] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updating instance '2aeb5a4c-785a-4238-8575-ecd1ff84b97c' progress to 50 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1808.953976] env[62508]: INFO nova.compute.resource_tracker [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Updating resource usage from migration db69c3cd-b958-40bd-b854-dd7203af037b [ 1808.969651] env[62508]: DEBUG nova.network.neutron [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Successfully updated port: 0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1809.262759] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef457079-16da-4344-9251-3356d2051b71 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.271505] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb965fa4-4acd-4c61-a378-91a87007be10 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.309428] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9cf2d11-3e2c-45d0-9fe6-99626bfe0bb6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.313381] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d8dfeb-31f2-4426-aeaa-6d3d09da6fae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.341014] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ee5e30-113c-4454-8767-270d525da1ff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.345851] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ddaa32-47d0-41ba-86f7-1d2995fa0a3e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.360176] env[62508]: DEBUG nova.compute.provider_tree [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f 
tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1809.380065] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updating instance '2aeb5a4c-785a-4238-8575-ecd1ff84b97c' progress to 67 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1809.475561] env[62508]: DEBUG oslo_concurrency.lockutils [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1809.475561] env[62508]: DEBUG oslo_concurrency.lockutils [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1809.475561] env[62508]: DEBUG nova.network.neutron [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1809.863746] env[62508]: DEBUG nova.scheduler.client.report [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1810.010352] env[62508]: DEBUG nova.network.neutron [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1810.157779] env[62508]: DEBUG nova.network.neutron [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Updating instance_info_cache with network_info: [{"id": "0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3", "address": "fa:16:3e:62:2e:ea", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e5955b6-9b", "ovs_interfaceid": "0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1810.369548] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.427s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.370047] env[62508]: INFO nova.compute.manager [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Migrating [ 1810.381026] env[62508]: DEBUG oslo_concurrency.lockutils [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.951s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.381929] env[62508]: INFO nova.compute.claims [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1810.660794] env[62508]: DEBUG oslo_concurrency.lockutils [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1810.661304] env[62508]: DEBUG nova.compute.manager [None 
req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Instance network_info: |[{"id": "0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3", "address": "fa:16:3e:62:2e:ea", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e5955b6-9b", "ovs_interfaceid": "0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1810.661813] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:2e:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dba18786-598d-4e06-96db-b3dc1717530f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1810.669624] env[62508]: DEBUG oslo.service.loopingcall [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1810.669887] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1810.670172] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-212683d6-2bb6-42c6-955d-08f27059e6ff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.692447] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1810.692447] env[62508]: value = "task-1776658" [ 1810.692447] env[62508]: _type = "Task" [ 1810.692447] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.702177] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776658, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.791394] env[62508]: DEBUG nova.compute.manager [req-21b6dc62-1b50-44ce-8419-81c92f6f622d req-fc08806d-c3bd-4c35-ba87-e349048ca4bb service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Received event network-changed-0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1810.791674] env[62508]: DEBUG nova.compute.manager [req-21b6dc62-1b50-44ce-8419-81c92f6f622d req-fc08806d-c3bd-4c35-ba87-e349048ca4bb service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Refreshing instance network info cache due to event network-changed-0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1810.791945] env[62508]: DEBUG oslo_concurrency.lockutils [req-21b6dc62-1b50-44ce-8419-81c92f6f622d req-fc08806d-c3bd-4c35-ba87-e349048ca4bb service nova] Acquiring lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1810.792236] env[62508]: DEBUG oslo_concurrency.lockutils [req-21b6dc62-1b50-44ce-8419-81c92f6f622d req-fc08806d-c3bd-4c35-ba87-e349048ca4bb service nova] Acquired lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1810.792454] env[62508]: DEBUG nova.network.neutron [req-21b6dc62-1b50-44ce-8419-81c92f6f622d req-fc08806d-c3bd-4c35-ba87-e349048ca4bb service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Refreshing network info cache for port 0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1810.900540] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "refresh_cache-d3455694-a157-404f-8153-a9f96bac49a2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1810.900918] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired lock "refresh_cache-d3455694-a157-404f-8153-a9f96bac49a2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1810.900981] env[62508]: DEBUG nova.network.neutron [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1811.054952] env[62508]: DEBUG nova.network.neutron [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Port 87ce4777-2520-4432-a1ed-03e189684761 binding to destination host cpu-1 is already ACTIVE {{(pid=62508) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1811.204266] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776658, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.543297] env[62508]: DEBUG nova.network.neutron [req-21b6dc62-1b50-44ce-8419-81c92f6f622d req-fc08806d-c3bd-4c35-ba87-e349048ca4bb service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Updated VIF entry in instance network info cache for port 0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1811.543393] env[62508]: DEBUG nova.network.neutron [req-21b6dc62-1b50-44ce-8419-81c92f6f622d req-fc08806d-c3bd-4c35-ba87-e349048ca4bb service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Updating instance_info_cache with network_info: [{"id": "0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3", "address": "fa:16:3e:62:2e:ea", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e5955b6-9b", "ovs_interfaceid": "0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1811.647712] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-145a228f-3bb6-45b6-be1e-ed5f7b145491 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.657397] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d97ce468-9f89-4f57-b053-3702801d0c7d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.689977] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6aa8471-e474-4146-b125-b7495cc115a1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.694780] env[62508]: DEBUG nova.network.neutron [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Updating instance_info_cache with network_info: [{"id": "ed5b1d50-d456-43d1-887a-96dcb4f42cec", "address": "fa:16:3e:68:ac:c2", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 
4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped5b1d50-d4", "ovs_interfaceid": "ed5b1d50-d456-43d1-887a-96dcb4f42cec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1811.702631] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be63a87-0518-4500-931c-7d7e81894e64 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.714856] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776658, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.723139] env[62508]: DEBUG nova.compute.provider_tree [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1812.049679] env[62508]: DEBUG oslo_concurrency.lockutils [req-21b6dc62-1b50-44ce-8419-81c92f6f622d req-fc08806d-c3bd-4c35-ba87-e349048ca4bb service nova] Releasing lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.058043] env[62508]: DEBUG oslo_vmware.rw_handles [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52306b8a-04b1-667a-9918-7c8bdd91c1a2/disk-0.vmdk. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1812.059055] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0cc7ce4-99b1-405e-9528-e4ad8fe1c6c7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.074491] env[62508]: DEBUG oslo_vmware.rw_handles [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52306b8a-04b1-667a-9918-7c8bdd91c1a2/disk-0.vmdk is in state: ready. 
{{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1812.074667] env[62508]: ERROR oslo_vmware.rw_handles [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52306b8a-04b1-667a-9918-7c8bdd91c1a2/disk-0.vmdk due to incomplete transfer. [ 1812.077479] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f6be1d64-c881-42c5-8b21-6c86863c6448 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.083717] env[62508]: DEBUG oslo_concurrency.lockutils [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1812.083933] env[62508]: DEBUG oslo_concurrency.lockutils [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.084129] env[62508]: DEBUG oslo_concurrency.lockutils [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.092199] env[62508]: DEBUG oslo_vmware.rw_handles [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52306b8a-04b1-667a-9918-7c8bdd91c1a2/disk-0.vmdk. 
{{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1812.092346] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Uploaded image 5bb3b02a-ae1d-4ff5-982b-1495a1fb26fe to the Glance image server {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1812.094708] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Destroying the VM {{(pid=62508) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1812.095756] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7c6e3fd2-6651-4c33-81cb-e3f2021cb7f6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.103566] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1812.103566] env[62508]: value = "task-1776659" [ 1812.103566] env[62508]: _type = "Task" [ 1812.103566] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.113380] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776659, 'name': Destroy_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.201712] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Releasing lock "refresh_cache-d3455694-a157-404f-8153-a9f96bac49a2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.206306] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776658, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.226393] env[62508]: DEBUG nova.scheduler.client.report [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1812.615409] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776659, 'name': Destroy_Task, 'duration_secs': 0.442838} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.615602] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Destroyed the VM [ 1812.615891] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Deleting Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1812.616466] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-cfd2b415-cb27-4659-8235-b17e568d9b1b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.626095] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1812.626095] env[62508]: value = "task-1776660" [ 1812.626095] env[62508]: _type = "Task" [ 1812.626095] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.635426] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776660, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.706781] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776658, 'name': CreateVM_Task, 'duration_secs': 1.88898} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.711146] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1812.711146] env[62508]: DEBUG oslo_concurrency.lockutils [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1812.711467] env[62508]: DEBUG oslo_concurrency.lockutils [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1812.711629] env[62508]: DEBUG oslo_concurrency.lockutils [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1812.712098] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff4fcec3-4109-4cf6-b490-722e3929bd26 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.718018] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1812.718018] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d86334-ee90-8f7b-79b3-95cad2e2ad0f" [ 1812.718018] env[62508]: _type = "Task" [ 1812.718018] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.726680] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d86334-ee90-8f7b-79b3-95cad2e2ad0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.730933] env[62508]: DEBUG oslo_concurrency.lockutils [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.351s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.731541] env[62508]: DEBUG nova.compute.manager [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1812.734180] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.604s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.736661] env[62508]: INFO nova.compute.claims [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1813.118979] env[62508]: DEBUG oslo_concurrency.lockutils [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1813.119297] env[62508]: DEBUG oslo_concurrency.lockutils [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1813.119342] env[62508]: DEBUG nova.network.neutron [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1813.138813] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776660, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.231053] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d86334-ee90-8f7b-79b3-95cad2e2ad0f, 'name': SearchDatastore_Task, 'duration_secs': 0.010375} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.231268] env[62508]: DEBUG oslo_concurrency.lockutils [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.231437] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1813.231682] env[62508]: DEBUG oslo_concurrency.lockutils [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1813.231831] env[62508]: DEBUG oslo_concurrency.lockutils [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1813.232017] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1813.232287] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a55c04f-aa92-460e-afa2-ce38e94344f8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.240193] env[62508]: DEBUG nova.compute.utils [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1813.244481] env[62508]: DEBUG nova.compute.manager [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1813.244655] env[62508]: DEBUG nova.network.neutron [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1813.246595] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1813.246808] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1813.247968] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4dac3d60-a1aa-4743-b1cd-8873dfe4bebc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.255805] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1813.255805] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5276c057-e4ed-33cd-79f6-1111f428f903" [ 1813.255805] env[62508]: _type = "Task" [ 1813.255805] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.266216] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5276c057-e4ed-33cd-79f6-1111f428f903, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.321805] env[62508]: DEBUG nova.policy [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81273f5ad53746e2bc89a7f2f7b7a727', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86e6f83751b0446fb8f00684082f018a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1813.571961] env[62508]: DEBUG nova.network.neutron [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Successfully created port: 405eb618-22d6-4623-a68c-d19671b3adf1 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1813.639872] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776660, 'name': RemoveSnapshot_Task, 'duration_secs': 0.861688} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.639872] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Deleted Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1813.642076] env[62508]: DEBUG nova.compute.manager [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1813.643019] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74db0770-f930-4cfd-957b-a58b3b6a4baa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.720820] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47321f6a-cfae-4664-86be-712111ad24cd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.742622] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Updating instance 'd3455694-a157-404f-8153-a9f96bac49a2' progress to 0 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1813.748256] env[62508]: DEBUG nova.compute.manager [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Start building block device mappings 
for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1813.766523] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5276c057-e4ed-33cd-79f6-1111f428f903, 'name': SearchDatastore_Task, 'duration_secs': 0.010327} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.767393] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37dc5d4d-d243-4c7e-bb43-da0c9e4e47a0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.776296] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1813.776296] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5209044a-0ced-aa01-3bc4-1701d9b66c47" [ 1813.776296] env[62508]: _type = "Task" [ 1813.776296] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.786994] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5209044a-0ced-aa01-3bc4-1701d9b66c47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.907458] env[62508]: DEBUG nova.network.neutron [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updating instance_info_cache with network_info: [{"id": "87ce4777-2520-4432-a1ed-03e189684761", "address": "fa:16:3e:8b:d2:e9", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87ce4777-25", "ovs_interfaceid": "87ce4777-2520-4432-a1ed-03e189684761", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1813.959549] env[62508]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44135d16-7a7f-43a9-b0b3-5f0ca0acaa64 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.971715] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-554670fa-bda1-4d89-8088-9811a54340c1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.009321] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b44c30d-c47d-4817-8d79-62d1dd054ba3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.017968] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63856dc6-5d2b-43d8-bb5f-c04e32dfc0dd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.032986] env[62508]: DEBUG nova.compute.provider_tree [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1814.166563] env[62508]: INFO nova.compute.manager [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Shelve offloading [ 1814.167983] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1814.168395] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68e9a18d-40de-440d-a882-aa26fb6eb39c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.178958] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1814.178958] env[62508]: value = "task-1776661" [ 1814.178958] env[62508]: _type = "Task" [ 1814.178958] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.191460] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] VM already powered off {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1814.193069] env[62508]: DEBUG nova.compute.manager [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1814.193069] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852f3782-d024-475e-80d8-140693467149 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.199623] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "refresh_cache-a15f3cef-c260-4a54-83af-7cccf81e15a6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.199796] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "refresh_cache-a15f3cef-c260-4a54-83af-7cccf81e15a6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.199972] env[62508]: DEBUG nova.network.neutron [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1814.254594] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1814.257916] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a57c2cfd-1c35-49a2-9efa-d3a78747d8c0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.266498] env[62508]: DEBUG oslo_vmware.api [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1814.266498] env[62508]: value = "task-1776662" [ 1814.266498] env[62508]: _type = "Task" [ 1814.266498] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.276694] env[62508]: DEBUG oslo_vmware.api [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776662, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.287057] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5209044a-0ced-aa01-3bc4-1701d9b66c47, 'name': SearchDatastore_Task, 'duration_secs': 0.010594} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.287338] env[62508]: DEBUG oslo_concurrency.lockutils [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1814.287599] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] aa7c5176-4420-44b1-9fea-6db7561492c7/aa7c5176-4420-44b1-9fea-6db7561492c7.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1814.287857] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0eb93fe1-039c-4526-b532-932268a66d1c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.297704] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1814.297704] env[62508]: value = "task-1776663" [ 1814.297704] env[62508]: _type = "Task" [ 1814.297704] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.308445] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776663, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.410423] env[62508]: DEBUG oslo_concurrency.lockutils [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1814.536516] env[62508]: DEBUG nova.scheduler.client.report [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1814.762134] env[62508]: DEBUG nova.compute.manager [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1814.781386] env[62508]: DEBUG oslo_vmware.api [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776662, 'name': PowerOffVM_Task, 'duration_secs': 0.454683} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.781664] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1814.781853] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Updating instance 'd3455694-a157-404f-8153-a9f96bac49a2' progress to 17 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1814.795073] env[62508]: DEBUG nova.virt.hardware [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1814.795412] env[62508]: DEBUG nova.virt.hardware [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1814.795638] env[62508]: DEBUG nova.virt.hardware [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1814.795901] env[62508]: DEBUG nova.virt.hardware [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1814.796135] env[62508]: DEBUG nova.virt.hardware [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1814.796357] env[62508]: DEBUG nova.virt.hardware [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1814.796650] env[62508]: DEBUG nova.virt.hardware [None 
req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1814.796873] env[62508]: DEBUG nova.virt.hardware [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1814.797131] env[62508]: DEBUG nova.virt.hardware [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1814.797378] env[62508]: DEBUG nova.virt.hardware [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1814.797621] env[62508]: DEBUG nova.virt.hardware [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1814.799048] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a2b347-c128-4ec5-9ac7-c5058e4d2832 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.814903] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc0850c-4b9f-47a5-aca6-cbce752fabd5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.819061] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776663, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501997} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.821328] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] aa7c5176-4420-44b1-9fea-6db7561492c7/aa7c5176-4420-44b1-9fea-6db7561492c7.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1814.821539] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1814.822129] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1235d727-780b-453f-8f66-4c0b609240e6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.837625] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1814.837625] env[62508]: value = "task-1776664" [ 1814.837625] env[62508]: _type = "Task" [ 1814.837625] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.846281] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776664, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.920671] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc5cdb1-5314-42cf-bb6b-54c514f03c38 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.931042] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3650f5-0351-43f6-a875-00b26aa16480 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.933017] env[62508]: DEBUG nova.network.neutron [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Updating instance_info_cache with network_info: [{"id": "4285d4c3-fb9d-444b-8988-be3ee4475807", "address": "fa:16:3e:ef:6e:db", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4285d4c3-fb", "ovs_interfaceid": "4285d4c3-fb9d-444b-8988-be3ee4475807", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1815.041475] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.307s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.042125] env[62508]: DEBUG nova.compute.manager [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1815.290058] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1815.290058] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1815.290058] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1815.290058] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1815.290058] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1815.290583] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1815.290583] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1815.290583] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1815.290830] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Got 1 possible topologies {{(pid=62508) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1815.291043] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1815.291270] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1815.296334] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5bc63785-5128-4262-8edd-3c7787d7eccf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.314118] env[62508]: DEBUG oslo_vmware.api [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1815.314118] env[62508]: value = "task-1776665" [ 1815.314118] env[62508]: _type = "Task" [ 1815.314118] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.323323] env[62508]: DEBUG oslo_vmware.api [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776665, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.347170] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776664, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070832} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.347427] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1815.348247] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-301afbc7-10ca-4c04-8bc9-c94d6ed040fa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.370083] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] aa7c5176-4420-44b1-9fea-6db7561492c7/aa7c5176-4420-44b1-9fea-6db7561492c7.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1815.370367] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c890e35-a4ae-4553-957b-1169542cbeea {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.389534] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1815.389534] env[62508]: value = "task-1776666" [ 1815.389534] env[62508]: _type = "Task" [ 1815.389534] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.397670] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776666, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.435578] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "refresh_cache-a15f3cef-c260-4a54-83af-7cccf81e15a6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.548116] env[62508]: DEBUG nova.compute.utils [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1815.550778] env[62508]: DEBUG nova.compute.manager [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1815.552233] env[62508]: DEBUG nova.network.neutron [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1815.621967] env[62508]: DEBUG nova.policy [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e41dfb589d24bb1ac97ebb67ea59f9a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e682eb7bbd240afb2f6581c7478b99c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1815.787244] env[62508]: DEBUG nova.compute.manager [req-1bccdacf-88d3-408c-aa6f-ae6b81b7ea54 req-56fce550-9a1b-4745-a2df-ef00a0e24f8d service nova] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Received event network-vif-unplugged-4285d4c3-fb9d-444b-8988-be3ee4475807 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1815.787244] env[62508]: DEBUG oslo_concurrency.lockutils [req-1bccdacf-88d3-408c-aa6f-ae6b81b7ea54 req-56fce550-9a1b-4745-a2df-ef00a0e24f8d service nova] Acquiring lock "a15f3cef-c260-4a54-83af-7cccf81e15a6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1815.787244] env[62508]: DEBUG oslo_concurrency.lockutils [req-1bccdacf-88d3-408c-aa6f-ae6b81b7ea54 req-56fce550-9a1b-4745-a2df-ef00a0e24f8d service nova] Lock "a15f3cef-c260-4a54-83af-7cccf81e15a6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1815.787244] env[62508]: DEBUG oslo_concurrency.lockutils [req-1bccdacf-88d3-408c-aa6f-ae6b81b7ea54 req-56fce550-9a1b-4745-a2df-ef00a0e24f8d service nova] Lock "a15f3cef-c260-4a54-83af-7cccf81e15a6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.787244] env[62508]: DEBUG nova.compute.manager [req-1bccdacf-88d3-408c-aa6f-ae6b81b7ea54 req-56fce550-9a1b-4745-a2df-ef00a0e24f8d service nova] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] No waiting events found dispatching network-vif-unplugged-4285d4c3-fb9d-444b-8988-be3ee4475807 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1815.787244] env[62508]: WARNING nova.compute.manager [req-1bccdacf-88d3-408c-aa6f-ae6b81b7ea54 req-56fce550-9a1b-4745-a2df-ef00a0e24f8d service nova] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Received unexpected event network-vif-unplugged-4285d4c3-fb9d-444b-8988-be3ee4475807 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1815.827634] env[62508]: DEBUG oslo_vmware.api [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776665, 'name': ReconfigVM_Task, 'duration_secs': 0.303858} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.829068] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Updating instance 'd3455694-a157-404f-8153-a9f96bac49a2' progress to 33 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1815.892183] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1815.896689] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d811a2-5b58-40c7-a64d-f37c2e091447 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.905758] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776666, 'name': ReconfigVM_Task, 'duration_secs': 0.292648} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.907938] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Reconfigured VM instance instance-00000066 to attach disk [datastore1] aa7c5176-4420-44b1-9fea-6db7561492c7/aa7c5176-4420-44b1-9fea-6db7561492c7.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1815.908623] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1815.908846] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f980698-e177-4603-aa6b-7fa443a6b566 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.910350] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fbc816db-9e2e-45e7-a6a9-8f367f14a32f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.917593] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1815.917593] env[62508]: value = "task-1776667" [ 1815.917593] 
env[62508]: _type = "Task" [ 1815.917593] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.927206] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776667, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.038255] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1655182-0139-4667-bb93-13a72b46ca07 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.062838] env[62508]: DEBUG nova.compute.manager [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1816.065855] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0beb04d2-44fa-4b1c-bd68-12540f719b42 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.070426] env[62508]: DEBUG nova.network.neutron [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Successfully created port: cfd14da4-5054-4b3f-bb35-eeefcb6843a9 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1816.076246] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updating instance '2aeb5a4c-785a-4238-8575-ecd1ff84b97c' progress to 83 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1816.341328] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1816.341328] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1816.341328] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f 
tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1816.341802] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1816.341802] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1816.341894] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1816.342141] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1816.342326] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1816.342548] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1816.342659] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1816.342866] env[62508]: DEBUG nova.virt.hardware [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1816.349204] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Reconfiguring VM instance instance-00000065 to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1816.349506] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dab4c17b-1efa-4f88-86ca-9ab378381097 
{{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.370886] env[62508]: DEBUG oslo_vmware.api [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1816.370886] env[62508]: value = "task-1776669" [ 1816.370886] env[62508]: _type = "Task" [ 1816.370886] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.379705] env[62508]: DEBUG oslo_vmware.api [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776669, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.427461] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776667, 'name': Rename_Task, 'duration_secs': 0.168363} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.427764] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1816.428034] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-93206988-43a4-4305-ac4a-2f118d8d3195 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.435058] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1816.435058] env[62508]: value = "task-1776670" [ 1816.435058] env[62508]: _type = "Task" [ 1816.435058] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.443393] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776670, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.581997] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1816.582321] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-53fd5018-902c-45e4-a25e-756856f99280 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.589545] env[62508]: DEBUG oslo_vmware.api [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1816.589545] env[62508]: value = "task-1776671" [ 1816.589545] env[62508]: _type = "Task" [ 1816.589545] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.601615] env[62508]: DEBUG oslo_vmware.api [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776671, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.882069] env[62508]: DEBUG oslo_vmware.api [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776669, 'name': ReconfigVM_Task, 'duration_secs': 0.231993} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.882513] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Reconfigured VM instance instance-00000065 to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1816.883357] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf12e22-448e-4f12-8f1a-415e094aca91 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.906157] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] volume-3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2/volume-3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1816.906453] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0f08578-a3cd-4d3f-8ce3-e3094dc0b40c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.925746] env[62508]: DEBUG oslo_vmware.api [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1816.925746] env[62508]: value = "task-1776672" [ 1816.925746] env[62508]: _type = "Task" [ 1816.925746] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.934337] env[62508]: DEBUG oslo_vmware.api [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776672, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.943316] env[62508]: DEBUG oslo_vmware.api [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776670, 'name': PowerOnVM_Task, 'duration_secs': 0.451253} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.943574] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1816.943777] env[62508]: INFO nova.compute.manager [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Took 8.77 seconds to spawn the instance on the hypervisor. 
[ 1816.943951] env[62508]: DEBUG nova.compute.manager [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1816.944708] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1b9b6b-143f-473d-981c-dddce2868e6d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.079444] env[62508]: DEBUG nova.compute.manager [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1817.101599] env[62508]: DEBUG oslo_vmware.api [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776671, 'name': PowerOnVM_Task, 'duration_secs': 0.47307} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.104186] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1817.104458] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-990bf4e6-429c-4763-a314-67ebee891668 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updating instance '2aeb5a4c-785a-4238-8575-ecd1ff84b97c' progress to 100 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1817.114489] env[62508]: DEBUG nova.virt.hardware [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1817.114733] env[62508]: DEBUG nova.virt.hardware [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:348}} [ 1817.114920] env[62508]: DEBUG nova.virt.hardware [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1817.115194] env[62508]: DEBUG nova.virt.hardware [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1817.115366] env[62508]: DEBUG nova.virt.hardware [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1817.115522] env[62508]: DEBUG nova.virt.hardware [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1817.115760] env[62508]: DEBUG nova.virt.hardware [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1817.115940] env[62508]: DEBUG nova.virt.hardware [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1817.116139] env[62508]: DEBUG nova.virt.hardware [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1817.116312] env[62508]: DEBUG nova.virt.hardware [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1817.116922] env[62508]: DEBUG nova.virt.hardware [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1817.117378] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2217f37-d97c-490f-afa6-475202d3ee40 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.127324] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb075dd-7334-412e-856f-ca6d83922c86 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.436548] env[62508]: DEBUG oslo_vmware.api [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776672, 'name': ReconfigVM_Task, 'duration_secs': 0.418714} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.436873] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Reconfigured VM instance instance-00000065 to attach disk [datastore1] volume-3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2/volume-3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1817.437149] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Updating instance 'd3455694-a157-404f-8153-a9f96bac49a2' progress to 50 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1817.464023] env[62508]: INFO nova.compute.manager [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Took 17.47 seconds to build instance. [ 1817.816830] env[62508]: DEBUG nova.compute.manager [req-63fba6c0-b990-4c84-a188-c9acce86ad3d req-e0fe10f4-f7fd-486f-b522-08781f84e4eb service nova] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Received event network-changed-4285d4c3-fb9d-444b-8988-be3ee4475807 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1817.817049] env[62508]: DEBUG nova.compute.manager [req-63fba6c0-b990-4c84-a188-c9acce86ad3d req-e0fe10f4-f7fd-486f-b522-08781f84e4eb service nova] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Refreshing instance network info cache due to event network-changed-4285d4c3-fb9d-444b-8988-be3ee4475807. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1817.817317] env[62508]: DEBUG oslo_concurrency.lockutils [req-63fba6c0-b990-4c84-a188-c9acce86ad3d req-e0fe10f4-f7fd-486f-b522-08781f84e4eb service nova] Acquiring lock "refresh_cache-a15f3cef-c260-4a54-83af-7cccf81e15a6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1817.817467] env[62508]: DEBUG oslo_concurrency.lockutils [req-63fba6c0-b990-4c84-a188-c9acce86ad3d req-e0fe10f4-f7fd-486f-b522-08781f84e4eb service nova] Acquired lock "refresh_cache-a15f3cef-c260-4a54-83af-7cccf81e15a6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1817.817565] env[62508]: DEBUG nova.network.neutron [req-63fba6c0-b990-4c84-a188-c9acce86ad3d req-e0fe10f4-f7fd-486f-b522-08781f84e4eb service nova] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Refreshing network info cache for port 4285d4c3-fb9d-444b-8988-be3ee4475807 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1817.944447] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa568aa3-e6e4-4ca2-907a-0f2a04c4a8f5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.964823] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a01e45-dfdf-485d-a045-69ba0b232a89 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.967659] env[62508]: DEBUG oslo_concurrency.lockutils [None req-921ed7dd-197d-45ba-be5a-e2a096149eea tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "aa7c5176-4420-44b1-9fea-6db7561492c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.990s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1817.984259] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Updating instance 'd3455694-a157-404f-8153-a9f96bac49a2' progress to 67 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1818.717980] env[62508]: DEBUG nova.network.neutron [req-63fba6c0-b990-4c84-a188-c9acce86ad3d req-e0fe10f4-f7fd-486f-b522-08781f84e4eb service nova] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Updated VIF entry in instance network info cache for port 4285d4c3-fb9d-444b-8988-be3ee4475807. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1818.718301] env[62508]: DEBUG nova.network.neutron [req-63fba6c0-b990-4c84-a188-c9acce86ad3d req-e0fe10f4-f7fd-486f-b522-08781f84e4eb service nova] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Updating instance_info_cache with network_info: [{"id": "4285d4c3-fb9d-444b-8988-be3ee4475807", "address": "fa:16:3e:ef:6e:db", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": null, "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap4285d4c3-fb", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1819.221109] env[62508]: DEBUG oslo_concurrency.lockutils [req-63fba6c0-b990-4c84-a188-c9acce86ad3d req-e0fe10f4-f7fd-486f-b522-08781f84e4eb service nova] Releasing lock "refresh_cache-a15f3cef-c260-4a54-83af-7cccf81e15a6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1819.648609] env[62508]: DEBUG nova.network.neutron [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Port ed5b1d50-d456-43d1-887a-96dcb4f42cec binding to destination host cpu-1 is already ACTIVE {{(pid=62508) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1819.784836] env[62508]: DEBUG nova.network.neutron [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Port 87ce4777-2520-4432-a1ed-03e189684761 binding to destination host cpu-1 is already ACTIVE {{(pid=62508) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1819.790019] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1819.790019] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1819.790276] env[62508]: DEBUG nova.network.neutron [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 
2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1819.848985] env[62508]: DEBUG nova.compute.manager [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Received event network-changed-ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1819.849426] env[62508]: DEBUG nova.compute.manager [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Refreshing instance network info cache due to event network-changed-ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1819.849660] env[62508]: DEBUG oslo_concurrency.lockutils [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] Acquiring lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1819.849876] env[62508]: DEBUG oslo_concurrency.lockutils [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] Acquired lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1819.849963] env[62508]: DEBUG nova.network.neutron [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Refreshing network info cache for port ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1820.123032] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1820.123032] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1820.123545] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Deleting the datastore file [datastore1] a15f3cef-c260-4a54-83af-7cccf81e15a6 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1820.123770] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-96b994d2-a7a2-4bb4-902f-a3f4202466fa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.132053] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1820.132053] env[62508]: value 
= "task-1776673" [ 1820.132053] env[62508]: _type = "Task" [ 1820.132053] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.140341] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776673, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.491981] env[62508]: DEBUG nova.network.neutron [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updating instance_info_cache with network_info: [{"id": "87ce4777-2520-4432-a1ed-03e189684761", "address": "fa:16:3e:8b:d2:e9", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87ce4777-25", "ovs_interfaceid": "87ce4777-2520-4432-a1ed-03e189684761", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1820.580459] env[62508]: DEBUG nova.network.neutron [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Updated VIF entry in instance network info cache for port ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1820.580852] env[62508]: DEBUG nova.network.neutron [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Updating instance_info_cache with network_info: [{"id": "ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4", "address": "fa:16:3e:8b:f5:37", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea63968d-e3", "ovs_interfaceid": "ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1820.642642] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776673, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.669946] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "d3455694-a157-404f-8153-a9f96bac49a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1820.669946] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "d3455694-a157-404f-8153-a9f96bac49a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.670157] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "d3455694-a157-404f-8153-a9f96bac49a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.995121] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1821.060018] env[62508]: DEBUG nova.compute.manager [req-23d0d33b-07b8-4b81-bdd1-ab5d73bc6a14 req-ba782115-c7c0-49aa-aa81-033621dca05c service nova] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Received event network-vif-plugged-405eb618-22d6-4623-a68c-d19671b3adf1 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1821.060018] env[62508]: DEBUG oslo_concurrency.lockutils [req-23d0d33b-07b8-4b81-bdd1-ab5d73bc6a14 req-ba782115-c7c0-49aa-aa81-033621dca05c service nova] Acquiring lock "cd2424b1-3842-4df4-8636-23417833ea49-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.060018] env[62508]: DEBUG oslo_concurrency.lockutils [req-23d0d33b-07b8-4b81-bdd1-ab5d73bc6a14 req-ba782115-c7c0-49aa-aa81-033621dca05c service nova] Lock "cd2424b1-3842-4df4-8636-23417833ea49-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1821.060018] env[62508]: DEBUG oslo_concurrency.lockutils [req-23d0d33b-07b8-4b81-bdd1-ab5d73bc6a14 req-ba782115-c7c0-49aa-aa81-033621dca05c service nova] Lock "cd2424b1-3842-4df4-8636-23417833ea49-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1821.060018] env[62508]: DEBUG nova.compute.manager 
[req-23d0d33b-07b8-4b81-bdd1-ab5d73bc6a14 req-ba782115-c7c0-49aa-aa81-033621dca05c service nova] [instance: cd2424b1-3842-4df4-8636-23417833ea49] No waiting events found dispatching network-vif-plugged-405eb618-22d6-4623-a68c-d19671b3adf1 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1821.060018] env[62508]: WARNING nova.compute.manager [req-23d0d33b-07b8-4b81-bdd1-ab5d73bc6a14 req-ba782115-c7c0-49aa-aa81-033621dca05c service nova] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Received unexpected event network-vif-plugged-405eb618-22d6-4623-a68c-d19671b3adf1 for instance with vm_state building and task_state spawning. [ 1821.084599] env[62508]: DEBUG oslo_concurrency.lockutils [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] Releasing lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1821.084716] env[62508]: DEBUG nova.compute.manager [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Received event network-changed-0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1821.084846] env[62508]: DEBUG nova.compute.manager [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Refreshing instance network info cache due to event network-changed-0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1821.085062] env[62508]: DEBUG oslo_concurrency.lockutils [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] Acquiring lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.085210] env[62508]: DEBUG oslo_concurrency.lockutils [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] Acquired lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1821.085372] env[62508]: DEBUG nova.network.neutron [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Refreshing network info cache for port 0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1821.144111] env[62508]: DEBUG oslo_vmware.api [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776673, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.640952} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.144376] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1821.144561] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1821.144737] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1821.166975] env[62508]: DEBUG nova.network.neutron [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Successfully updated port: cfd14da4-5054-4b3f-bb35-eeefcb6843a9 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1821.178572] env[62508]: INFO nova.scheduler.client.report [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Deleted allocations for instance a15f3cef-c260-4a54-83af-7cccf81e15a6 [ 1821.185400] env[62508]: DEBUG nova.network.neutron [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Successfully updated port: 405eb618-22d6-4623-a68c-d19671b3adf1 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1821.499508] env[62508]: DEBUG nova.compute.manager [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62508) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 1821.670596] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "refresh_cache-d3829b04-6d1f-44f0-8b94-30b582506ed4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.670764] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquired lock "refresh_cache-d3829b04-6d1f-44f0-8b94-30b582506ed4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1821.670921] env[62508]: DEBUG nova.network.neutron [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] 
[instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1821.686710] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.686972] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1821.687214] env[62508]: DEBUG nova.objects.instance [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lazy-loading 'resources' on Instance uuid a15f3cef-c260-4a54-83af-7cccf81e15a6 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1821.688367] env[62508]: DEBUG oslo_concurrency.lockutils [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "refresh_cache-cd2424b1-3842-4df4-8636-23417833ea49" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.688502] env[62508]: DEBUG oslo_concurrency.lockutils [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "refresh_cache-cd2424b1-3842-4df4-8636-23417833ea49" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1821.688662] env[62508]: DEBUG nova.network.neutron [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1821.705951] env[62508]: DEBUG oslo_concurrency.lockutils [None req-62a112d4-65b6-4f99-8408-ee7b8615c5a3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.706226] env[62508]: DEBUG oslo_concurrency.lockutils [None req-62a112d4-65b6-4f99-8408-ee7b8615c5a3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1821.726294] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 
tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "refresh_cache-d3455694-a157-404f-8153-a9f96bac49a2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.726294] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired lock "refresh_cache-d3455694-a157-404f-8153-a9f96bac49a2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1821.726294] env[62508]: DEBUG nova.network.neutron [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1821.874318] env[62508]: DEBUG nova.compute.manager [req-98262976-3c5c-4c59-8aad-baf4b65fc7b5 req-68bb8e5a-686c-45d4-bdd1-afe44fb43f15 service nova] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Received event network-vif-plugged-cfd14da4-5054-4b3f-bb35-eeefcb6843a9 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1821.874549] env[62508]: DEBUG oslo_concurrency.lockutils [req-98262976-3c5c-4c59-8aad-baf4b65fc7b5 req-68bb8e5a-686c-45d4-bdd1-afe44fb43f15 service nova] Acquiring lock "d3829b04-6d1f-44f0-8b94-30b582506ed4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.874757] env[62508]: DEBUG oslo_concurrency.lockutils [req-98262976-3c5c-4c59-8aad-baf4b65fc7b5 req-68bb8e5a-686c-45d4-bdd1-afe44fb43f15 service nova] Lock "d3829b04-6d1f-44f0-8b94-30b582506ed4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1821.874922] env[62508]: DEBUG oslo_concurrency.lockutils [req-98262976-3c5c-4c59-8aad-baf4b65fc7b5 req-68bb8e5a-686c-45d4-bdd1-afe44fb43f15 service nova] Lock "d3829b04-6d1f-44f0-8b94-30b582506ed4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1821.875103] env[62508]: DEBUG nova.compute.manager [req-98262976-3c5c-4c59-8aad-baf4b65fc7b5 req-68bb8e5a-686c-45d4-bdd1-afe44fb43f15 service nova] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] No waiting events found dispatching network-vif-plugged-cfd14da4-5054-4b3f-bb35-eeefcb6843a9 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1821.875273] env[62508]: WARNING nova.compute.manager [req-98262976-3c5c-4c59-8aad-baf4b65fc7b5 req-68bb8e5a-686c-45d4-bdd1-afe44fb43f15 service nova] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Received unexpected event network-vif-plugged-cfd14da4-5054-4b3f-bb35-eeefcb6843a9 for instance with vm_state building and task_state spawning. 
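The Acquiring/Acquired/Releasing triples around the "refresh_cache-<uuid>" locks in the entries above come from oslo.concurrency's named-lock helper (lockutils.py:310/313/331). The following is a minimal, illustrative sketch of that pattern only; the instance and port UUIDs are copied from the log, and refresh_network_info_cache is a hypothetical stand-in for the Neutron cache refresh, not Nova's actual code:

    from oslo_concurrency import lockutils

    INSTANCE_UUID = "d3829b04-6d1f-44f0-8b94-30b582506ed4"  # UUID taken from the log above

    def refresh_network_info_cache(instance_uuid, port_id):
        # Hypothetical stand-in: real code would query Neutron and update the
        # instance_info_cache, as the nova.network.neutron entries show.
        return {"port_id": port_id, "active": True}

    # Entering the context manager emits the 'Acquiring lock' / 'Acquired lock'
    # DEBUG lines; leaving it emits the 'Releasing lock' line.
    with lockutils.lock("refresh_cache-%s" % INSTANCE_UUID):
        nw_info = refresh_network_info_cache(
            INSTANCE_UUID, "cfd14da4-5054-4b3f-bb35-eeefcb6843a9")
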
[ 1821.875435] env[62508]: DEBUG nova.compute.manager [req-98262976-3c5c-4c59-8aad-baf4b65fc7b5 req-68bb8e5a-686c-45d4-bdd1-afe44fb43f15 service nova] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Received event network-changed-cfd14da4-5054-4b3f-bb35-eeefcb6843a9 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1821.875589] env[62508]: DEBUG nova.compute.manager [req-98262976-3c5c-4c59-8aad-baf4b65fc7b5 req-68bb8e5a-686c-45d4-bdd1-afe44fb43f15 service nova] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Refreshing instance network info cache due to event network-changed-cfd14da4-5054-4b3f-bb35-eeefcb6843a9. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1821.875752] env[62508]: DEBUG oslo_concurrency.lockutils [req-98262976-3c5c-4c59-8aad-baf4b65fc7b5 req-68bb8e5a-686c-45d4-bdd1-afe44fb43f15 service nova] Acquiring lock "refresh_cache-d3829b04-6d1f-44f0-8b94-30b582506ed4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.876595] env[62508]: DEBUG nova.network.neutron [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Updated VIF entry in instance network info cache for port 0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1821.876915] env[62508]: DEBUG nova.network.neutron [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Updating instance_info_cache with network_info: [{"id": "0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3", "address": "fa:16:3e:62:2e:ea", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e5955b6-9b", "ovs_interfaceid": "0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.196139] env[62508]: DEBUG nova.objects.instance [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lazy-loading 'numa_topology' on Instance uuid a15f3cef-c260-4a54-83af-7cccf81e15a6 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1822.208343] env[62508]: DEBUG nova.network.neutron [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 
d3829b04-6d1f-44f0-8b94-30b582506ed4] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1822.210900] env[62508]: INFO nova.compute.manager [None req-62a112d4-65b6-4f99-8408-ee7b8615c5a3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Detaching volume 6a9e4102-a8e2-41b9-9290-7b0979ea805b [ 1822.227379] env[62508]: DEBUG nova.network.neutron [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1822.255715] env[62508]: INFO nova.virt.block_device [None req-62a112d4-65b6-4f99-8408-ee7b8615c5a3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Attempting to driver detach volume 6a9e4102-a8e2-41b9-9290-7b0979ea805b from mountpoint /dev/sdb [ 1822.255940] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-62a112d4-65b6-4f99-8408-ee7b8615c5a3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Volume detach. Driver type: vmdk {{(pid=62508) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1822.256130] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-62a112d4-65b6-4f99-8408-ee7b8615c5a3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368796', 'volume_id': '6a9e4102-a8e2-41b9-9290-7b0979ea805b', 'name': 'volume-6a9e4102-a8e2-41b9-9290-7b0979ea805b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'a8ce13c4-ea95-4343-8eab-8a0dafbf0e03', 'attached_at': '', 'detached_at': '', 'volume_id': '6a9e4102-a8e2-41b9-9290-7b0979ea805b', 'serial': '6a9e4102-a8e2-41b9-9290-7b0979ea805b'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1822.257044] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7a9f3d-645f-4c35-8172-f539db0922f2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.286851] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec164d3-70f4-472c-a8c1-818653d193cb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.301042] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7788d9bc-237d-436b-bcfa-5511d6f13247 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.329915] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b96b0ab-556a-4a12-aa89-9f2a38f9c08e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.347786] env[62508]: 
DEBUG nova.virt.vmwareapi.volumeops [None req-62a112d4-65b6-4f99-8408-ee7b8615c5a3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] The volume has not been displaced from its original location: [datastore1] volume-6a9e4102-a8e2-41b9-9290-7b0979ea805b/volume-6a9e4102-a8e2-41b9-9290-7b0979ea805b.vmdk. No consolidation needed. {{(pid=62508) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1822.353564] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-62a112d4-65b6-4f99-8408-ee7b8615c5a3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Reconfiguring VM instance instance-0000003c to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1822.353911] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51f649a5-54a1-4b63-8821-7ba0bdb075f6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.380039] env[62508]: DEBUG oslo_concurrency.lockutils [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] Releasing lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.380352] env[62508]: DEBUG nova.compute.manager [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Received event network-changed-0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1822.380513] env[62508]: DEBUG nova.compute.manager [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Refreshing instance network info cache due to event network-changed-0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1822.380744] env[62508]: DEBUG oslo_concurrency.lockutils [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] Acquiring lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.380900] env[62508]: DEBUG oslo_concurrency.lockutils [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] Acquired lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.381081] env[62508]: DEBUG nova.network.neutron [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Refreshing network info cache for port 0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1822.387020] env[62508]: DEBUG oslo_vmware.api [None req-62a112d4-65b6-4f99-8408-ee7b8615c5a3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1822.387020] env[62508]: value = "task-1776674" [ 1822.387020] env[62508]: _type = "Task" [ 1822.387020] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.393302] env[62508]: DEBUG oslo_vmware.api [None req-62a112d4-65b6-4f99-8408-ee7b8615c5a3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776674, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.434043] env[62508]: DEBUG nova.network.neutron [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Updating instance_info_cache with network_info: [{"id": "405eb618-22d6-4623-a68c-d19671b3adf1", "address": "fa:16:3e:05:41:ee", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap405eb618-22", "ovs_interfaceid": "405eb618-22d6-4623-a68c-d19671b3adf1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.526709] env[62508]: DEBUG nova.network.neutron [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Updating instance_info_cache with network_info: [{"id": "cfd14da4-5054-4b3f-bb35-eeefcb6843a9", "address": "fa:16:3e:7a:3e:02", "network": {"id": "121c7907-9028-4be7-9d23-48e5c34ec429", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-954413717-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e682eb7bbd240afb2f6581c7478b99c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcfd14da4-50", "ovs_interfaceid": "cfd14da4-5054-4b3f-bb35-eeefcb6843a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.591728] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62508) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.625612] env[62508]: DEBUG nova.network.neutron [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Updating instance_info_cache with network_info: [{"id": "ed5b1d50-d456-43d1-887a-96dcb4f42cec", "address": "fa:16:3e:68:ac:c2", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped5b1d50-d4", "ovs_interfaceid": "ed5b1d50-d456-43d1-887a-96dcb4f42cec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.699893] env[62508]: DEBUG nova.objects.base [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1822.854271] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b64494c-b2f1-4827-b8eb-c63c08b0dea7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.862263] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35f6bcee-1dcb-4786-bb16-2c9d2dcd16a0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.899949] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b86e66a6-a05e-4b2f-945e-254eaccb247e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.910810] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0069d2-2598-4c02-8235-c4c7668934cc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.915664] env[62508]: DEBUG oslo_vmware.api [None req-62a112d4-65b6-4f99-8408-ee7b8615c5a3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776674, 'name': ReconfigVM_Task, 'duration_secs': 0.256056} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.915947] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-62a112d4-65b6-4f99-8408-ee7b8615c5a3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Reconfigured VM instance instance-0000003c to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1822.921213] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bad669d6-928a-44af-aaa3-c47552c177b0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.941778] env[62508]: DEBUG oslo_concurrency.lockutils [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "refresh_cache-cd2424b1-3842-4df4-8636-23417833ea49" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.942071] env[62508]: DEBUG nova.compute.manager [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Instance network_info: |[{"id": "405eb618-22d6-4623-a68c-d19671b3adf1", "address": "fa:16:3e:05:41:ee", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap405eb618-22", "ovs_interfaceid": "405eb618-22d6-4623-a68c-d19671b3adf1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1822.942539] env[62508]: DEBUG nova.compute.provider_tree [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1822.944969] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:41:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec46b14d-3310-4f2b-96c1-f53ee47d3759', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'405eb618-22d6-4623-a68c-d19671b3adf1', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1822.954205] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Creating folder: Project (86e6f83751b0446fb8f00684082f018a). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1822.957641] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f28afb95-64ca-49dd-9c1b-1f004ffc34d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.959511] env[62508]: DEBUG oslo_vmware.api [None req-62a112d4-65b6-4f99-8408-ee7b8615c5a3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1822.959511] env[62508]: value = "task-1776675" [ 1822.959511] env[62508]: _type = "Task" [ 1822.959511] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.968856] env[62508]: DEBUG oslo_vmware.api [None req-62a112d4-65b6-4f99-8408-ee7b8615c5a3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776675, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.970148] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Created folder: Project (86e6f83751b0446fb8f00684082f018a) in parent group-v368536. [ 1822.970375] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Creating folder: Instances. Parent ref: group-v368822. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1822.970625] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8350aa30-423b-452c-89f9-5b8ccefdd968 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.979342] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Created folder: Instances in parent group-v368822. [ 1822.979571] env[62508]: DEBUG oslo.service.loopingcall [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1822.979754] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1822.980012] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21bf1a3d-8838-4cb4-bfb3-d509d705e0d7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.999919] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1822.999919] env[62508]: value = "task-1776678" [ 1822.999919] env[62508]: _type = "Task" [ 1822.999919] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.011021] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776678, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.029357] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Releasing lock "refresh_cache-d3829b04-6d1f-44f0-8b94-30b582506ed4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.029667] env[62508]: DEBUG nova.compute.manager [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Instance network_info: |[{"id": "cfd14da4-5054-4b3f-bb35-eeefcb6843a9", "address": "fa:16:3e:7a:3e:02", "network": {"id": "121c7907-9028-4be7-9d23-48e5c34ec429", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-954413717-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e682eb7bbd240afb2f6581c7478b99c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcfd14da4-50", "ovs_interfaceid": "cfd14da4-5054-4b3f-bb35-eeefcb6843a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1823.032058] env[62508]: DEBUG oslo_concurrency.lockutils [req-98262976-3c5c-4c59-8aad-baf4b65fc7b5 req-68bb8e5a-686c-45d4-bdd1-afe44fb43f15 service nova] Acquired lock "refresh_cache-d3829b04-6d1f-44f0-8b94-30b582506ed4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1823.032256] env[62508]: DEBUG nova.network.neutron [req-98262976-3c5c-4c59-8aad-baf4b65fc7b5 req-68bb8e5a-686c-45d4-bdd1-afe44fb43f15 service nova] [instance: 
d3829b04-6d1f-44f0-8b94-30b582506ed4] Refreshing network info cache for port cfd14da4-5054-4b3f-bb35-eeefcb6843a9 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1823.034141] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:3e:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0d7a2b2f-3b49-4dc8-9096-af16144b27a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cfd14da4-5054-4b3f-bb35-eeefcb6843a9', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1823.042311] env[62508]: DEBUG oslo.service.loopingcall [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1823.043208] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1823.043847] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1cceabe-f835-4adf-83e4-17b9e4e2739c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.066579] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1823.066579] env[62508]: value = "task-1776679" [ 1823.066579] env[62508]: _type = "Task" [ 1823.066579] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.077511] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776679, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.094354] env[62508]: DEBUG nova.compute.manager [req-9bb46e88-3e8b-4dab-833e-bf0191b339d9 req-37ed6a07-d763-49e4-b8cf-47170fe23700 service nova] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Received event network-changed-405eb618-22d6-4623-a68c-d19671b3adf1 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1823.094561] env[62508]: DEBUG nova.compute.manager [req-9bb46e88-3e8b-4dab-833e-bf0191b339d9 req-37ed6a07-d763-49e4-b8cf-47170fe23700 service nova] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Refreshing instance network info cache due to event network-changed-405eb618-22d6-4623-a68c-d19671b3adf1. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1823.094767] env[62508]: DEBUG oslo_concurrency.lockutils [req-9bb46e88-3e8b-4dab-833e-bf0191b339d9 req-37ed6a07-d763-49e4-b8cf-47170fe23700 service nova] Acquiring lock "refresh_cache-cd2424b1-3842-4df4-8636-23417833ea49" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1823.094935] env[62508]: DEBUG oslo_concurrency.lockutils [req-9bb46e88-3e8b-4dab-833e-bf0191b339d9 req-37ed6a07-d763-49e4-b8cf-47170fe23700 service nova] Acquired lock "refresh_cache-cd2424b1-3842-4df4-8636-23417833ea49" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1823.095067] env[62508]: DEBUG nova.network.neutron [req-9bb46e88-3e8b-4dab-833e-bf0191b339d9 req-37ed6a07-d763-49e4-b8cf-47170fe23700 service nova] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Refreshing network info cache for port 405eb618-22d6-4623-a68c-d19671b3adf1 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1823.128056] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e5e340fd-c7d0-4294-a0a4-7c76356af853 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "a15f3cef-c260-4a54-83af-7cccf81e15a6" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.128712] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Releasing lock "refresh_cache-d3455694-a157-404f-8153-a9f96bac49a2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.170049] env[62508]: DEBUG nova.network.neutron [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Updated VIF entry in instance network info cache for port 0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1823.170517] env[62508]: DEBUG nova.network.neutron [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Updating instance_info_cache with network_info: [{"id": "0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3", "address": "fa:16:3e:62:2e:ea", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e5955b6-9b", "ovs_interfaceid": "0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.456778] env[62508]: DEBUG nova.scheduler.client.report [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1823.469873] env[62508]: DEBUG oslo_vmware.api [None req-62a112d4-65b6-4f99-8408-ee7b8615c5a3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776675, 'name': ReconfigVM_Task, 'duration_secs': 0.15024} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.470248] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-62a112d4-65b6-4f99-8408-ee7b8615c5a3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368796', 'volume_id': '6a9e4102-a8e2-41b9-9290-7b0979ea805b', 'name': 'volume-6a9e4102-a8e2-41b9-9290-7b0979ea805b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'a8ce13c4-ea95-4343-8eab-8a0dafbf0e03', 'attached_at': '', 'detached_at': '', 'volume_id': '6a9e4102-a8e2-41b9-9290-7b0979ea805b', 'serial': '6a9e4102-a8e2-41b9-9290-7b0979ea805b'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1823.510129] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776678, 'name': CreateVM_Task, 'duration_secs': 0.450959} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.510301] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1823.511057] env[62508]: DEBUG oslo_concurrency.lockutils [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1823.511231] env[62508]: DEBUG oslo_concurrency.lockutils [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1823.511590] env[62508]: DEBUG oslo_concurrency.lockutils [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1823.511849] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34e706eb-80a4-450b-8f9b-aadd805386e6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.516260] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1823.516260] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52833488-6b48-af08-2741-4989b2b2a8ad" [ 1823.516260] env[62508]: _type = "Task" [ 1823.516260] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.523983] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52833488-6b48-af08-2741-4989b2b2a8ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.578727] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776679, 'name': CreateVM_Task, 'duration_secs': 0.351547} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.578928] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1823.579627] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1823.637534] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45946ef3-98a1-41e7-8fcc-ecc0e615af78 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.644591] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7656cec-b10b-4786-b2ed-6c700bbe9453 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.673539] env[62508]: DEBUG oslo_concurrency.lockutils [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] Releasing lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.673993] env[62508]: DEBUG nova.compute.manager [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Received event network-changed-ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1823.673993] env[62508]: DEBUG nova.compute.manager [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Refreshing instance network info cache due to event network-changed-ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1823.674173] env[62508]: DEBUG oslo_concurrency.lockutils [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] Acquiring lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1823.674315] env[62508]: DEBUG oslo_concurrency.lockutils [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] Acquired lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1823.674472] env[62508]: DEBUG nova.network.neutron [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Refreshing network info cache for port ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1823.858166] env[62508]: DEBUG nova.network.neutron [req-98262976-3c5c-4c59-8aad-baf4b65fc7b5 req-68bb8e5a-686c-45d4-bdd1-afe44fb43f15 service nova] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Updated VIF entry in instance network info cache for port cfd14da4-5054-4b3f-bb35-eeefcb6843a9. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1823.858166] env[62508]: DEBUG nova.network.neutron [req-98262976-3c5c-4c59-8aad-baf4b65fc7b5 req-68bb8e5a-686c-45d4-bdd1-afe44fb43f15 service nova] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Updating instance_info_cache with network_info: [{"id": "cfd14da4-5054-4b3f-bb35-eeefcb6843a9", "address": "fa:16:3e:7a:3e:02", "network": {"id": "121c7907-9028-4be7-9d23-48e5c34ec429", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-954413717-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e682eb7bbd240afb2f6581c7478b99c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcfd14da4-50", "ovs_interfaceid": "cfd14da4-5054-4b3f-bb35-eeefcb6843a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.924618] env[62508]: DEBUG nova.network.neutron [req-9bb46e88-3e8b-4dab-833e-bf0191b339d9 req-37ed6a07-d763-49e4-b8cf-47170fe23700 service nova] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Updated VIF entry in instance network info cache for port 405eb618-22d6-4623-a68c-d19671b3adf1. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1823.924968] env[62508]: DEBUG nova.network.neutron [req-9bb46e88-3e8b-4dab-833e-bf0191b339d9 req-37ed6a07-d763-49e4-b8cf-47170fe23700 service nova] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Updating instance_info_cache with network_info: [{"id": "405eb618-22d6-4623-a68c-d19671b3adf1", "address": "fa:16:3e:05:41:ee", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap405eb618-22", "ovs_interfaceid": "405eb618-22d6-4623-a68c-d19671b3adf1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.961726] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.275s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.964151] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 1.373s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.016089] env[62508]: DEBUG nova.objects.instance [None req-62a112d4-65b6-4f99-8408-ee7b8615c5a3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lazy-loading 'flavor' on Instance uuid a8ce13c4-ea95-4343-8eab-8a0dafbf0e03 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1824.027649] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52833488-6b48-af08-2741-4989b2b2a8ad, 'name': SearchDatastore_Task, 'duration_secs': 0.011418} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.027978] env[62508]: DEBUG oslo_concurrency.lockutils [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1824.028674] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1824.028922] env[62508]: DEBUG oslo_concurrency.lockutils [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1824.029093] env[62508]: DEBUG oslo_concurrency.lockutils [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1824.029333] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1824.029558] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1824.029866] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1824.030156] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e90dfaa-7e5f-46e2-aca7-f411a80f3077 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.032664] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f5784f3-8959-4426-a39a-60c974fe1146 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.038879] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c 
tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1824.038879] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52664e77-a5f7-f954-944b-663e57055ef1" [ 1824.038879] env[62508]: _type = "Task" [ 1824.038879] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.043187] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1824.043366] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1824.044362] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0651dad-97b7-4410-a24d-18eb6818fe2f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.049867] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52664e77-a5f7-f954-944b-663e57055ef1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.053021] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1824.053021] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527a7f1d-4421-7024-9707-3f6487331e3d" [ 1824.053021] env[62508]: _type = "Task" [ 1824.053021] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.060873] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527a7f1d-4421-7024-9707-3f6487331e3d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.360712] env[62508]: DEBUG oslo_concurrency.lockutils [req-98262976-3c5c-4c59-8aad-baf4b65fc7b5 req-68bb8e5a-686c-45d4-bdd1-afe44fb43f15 service nova] Releasing lock "refresh_cache-d3829b04-6d1f-44f0-8b94-30b582506ed4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1824.397568] env[62508]: DEBUG nova.network.neutron [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Updated VIF entry in instance network info cache for port ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1824.397990] env[62508]: DEBUG nova.network.neutron [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Updating instance_info_cache with network_info: [{"id": "ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4", "address": "fa:16:3e:8b:f5:37", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea63968d-e3", "ovs_interfaceid": "ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1824.427555] env[62508]: DEBUG oslo_concurrency.lockutils [req-9bb46e88-3e8b-4dab-833e-bf0191b339d9 req-37ed6a07-d763-49e4-b8cf-47170fe23700 service nova] Releasing lock "refresh_cache-cd2424b1-3842-4df4-8636-23417833ea49" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1824.467999] env[62508]: DEBUG nova.objects.instance [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lazy-loading 'migration_context' on Instance uuid 2aeb5a4c-785a-4238-8575-ecd1ff84b97c {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1824.473729] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f5619331-4452-44b6-b018-78671215c512 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "a15f3cef-c260-4a54-83af-7cccf81e15a6" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 27.689s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.475168] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e5e340fd-c7d0-4294-a0a4-7c76356af853 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "a15f3cef-c260-4a54-83af-7cccf81e15a6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.347s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.475168] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e5e340fd-c7d0-4294-a0a4-7c76356af853 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock 
"a15f3cef-c260-4a54-83af-7cccf81e15a6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.475301] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e5e340fd-c7d0-4294-a0a4-7c76356af853 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "a15f3cef-c260-4a54-83af-7cccf81e15a6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.475438] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e5e340fd-c7d0-4294-a0a4-7c76356af853 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "a15f3cef-c260-4a54-83af-7cccf81e15a6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.477421] env[62508]: INFO nova.compute.manager [None req-e5e340fd-c7d0-4294-a0a4-7c76356af853 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Terminating instance [ 1824.479117] env[62508]: DEBUG nova.compute.manager [None req-e5e340fd-c7d0-4294-a0a4-7c76356af853 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1824.479318] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e340fd-c7d0-4294-a0a4-7c76356af853 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1824.479579] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-306e4617-cc24-4a43-a4f4-3e1b3aeebc3d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.490156] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0a5d84-0693-4eb7-a5a3-6a74028752db {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.525518] env[62508]: WARNING nova.virt.vmwareapi.vmops [None req-e5e340fd-c7d0-4294-a0a4-7c76356af853 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a15f3cef-c260-4a54-83af-7cccf81e15a6 could not be found. 
[ 1824.525738] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e340fd-c7d0-4294-a0a4-7c76356af853 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1824.525916] env[62508]: INFO nova.compute.manager [None req-e5e340fd-c7d0-4294-a0a4-7c76356af853 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1824.526180] env[62508]: DEBUG oslo.service.loopingcall [None req-e5e340fd-c7d0-4294-a0a4-7c76356af853 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1824.527670] env[62508]: DEBUG nova.compute.manager [-] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1824.527670] env[62508]: DEBUG nova.network.neutron [-] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1824.550289] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52664e77-a5f7-f954-944b-663e57055ef1, 'name': SearchDatastore_Task, 'duration_secs': 0.021588} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.550670] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1824.550941] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1824.551245] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1824.563245] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527a7f1d-4421-7024-9707-3f6487331e3d, 'name': SearchDatastore_Task, 'duration_secs': 0.008521} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.564058] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ddd6ec6-1f8a-419a-a003-ba5bece6ba53 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.569455] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1824.569455] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524f971f-5000-4976-f746-af0757f63330" [ 1824.569455] env[62508]: _type = "Task" [ 1824.569455] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.579823] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524f971f-5000-4976-f746-af0757f63330, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.588352] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Acquiring lock "e7f521db-2dab-4c2c-bf2b-aa6e217f29bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.588617] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Lock "e7f521db-2dab-4c2c-bf2b-aa6e217f29bd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.588827] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Acquiring lock "e7f521db-2dab-4c2c-bf2b-aa6e217f29bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.589037] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Lock "e7f521db-2dab-4c2c-bf2b-aa6e217f29bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.589228] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Lock "e7f521db-2dab-4c2c-bf2b-aa6e217f29bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.591385] env[62508]: INFO nova.compute.manager [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Terminating instance [ 1824.593224] env[62508]: DEBUG nova.compute.manager [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1824.593497] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1824.594238] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0235b7a-98c5-4f24-8552-c95c457f6e35 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.601240] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1824.601432] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f191eb8b-8de7-4643-b311-a50e6ae6146f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.608358] env[62508]: DEBUG oslo_vmware.api [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Waiting for the task: (returnval){ [ 1824.608358] env[62508]: value = "task-1776680" [ 1824.608358] env[62508]: _type = "Task" [ 1824.608358] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.616300] env[62508]: DEBUG oslo_vmware.api [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': task-1776680, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.759142] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b1dc45-ead0-46cd-b27f-e638f6f13cfd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.784180] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea601c06-4ed7-42d2-9830-4284870efa84 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.792776] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Updating instance 'd3455694-a157-404f-8153-a9f96bac49a2' progress to 83 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1824.901276] env[62508]: DEBUG oslo_concurrency.lockutils [req-c80514e2-0bab-4057-864a-035d8faca630 req-87dd99b6-e236-468b-8aa2-3179a9e0a8b0 service nova] Releasing lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1825.030935] env[62508]: DEBUG oslo_concurrency.lockutils [None req-62a112d4-65b6-4f99-8408-ee7b8615c5a3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.325s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.083831] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524f971f-5000-4976-f746-af0757f63330, 'name': SearchDatastore_Task, 'duration_secs': 0.012388} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.084114] env[62508]: DEBUG oslo_concurrency.lockutils [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1825.084469] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] cd2424b1-3842-4df4-8636-23417833ea49/cd2424b1-3842-4df4-8636-23417833ea49.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1825.086689] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1825.086899] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1825.087139] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1bf2c32d-e0b2-4a73-bf0e-1c9095ecb655 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.089753] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45db167c-98bd-4cd9-aff4-f3b051c2c948 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.096270] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1825.096270] env[62508]: value = "task-1776681" [ 1825.096270] env[62508]: _type = "Task" [ 1825.096270] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.102478] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1825.102681] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1825.103871] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89588277-a830-4f81-8e21-47bde733d8e8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.109147] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776681, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.113772] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1825.113772] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5214af4a-500e-adfe-787a-60fe7a79aee9" [ 1825.113772] env[62508]: _type = "Task" [ 1825.113772] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.123932] env[62508]: DEBUG oslo_vmware.api [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': task-1776680, 'name': PowerOffVM_Task, 'duration_secs': 0.204746} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.124618] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1825.124842] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1825.125094] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48fcddaa-60ba-4c67-aad4-9986dfd9864b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.131587] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55cfdfe0-99e5-4173-8bca-232de0cee997 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.134095] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5214af4a-500e-adfe-787a-60fe7a79aee9, 'name': SearchDatastore_Task, 'duration_secs': 0.00836} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.135476] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39b90575-0f8f-4363-9913-763523ce812d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.140609] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-011e5aa8-96cf-40e7-b033-11f968ea7501 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.144867] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1825.144867] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529b9069-4728-d0c8-d58d-332471a49198" [ 1825.144867] env[62508]: _type = "Task" [ 1825.144867] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.179166] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b45ee88-d2bd-4919-84bf-6fc8e7ea55b1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.182208] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529b9069-4728-d0c8-d58d-332471a49198, 'name': SearchDatastore_Task, 'duration_secs': 0.007729} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.182496] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1825.182764] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] d3829b04-6d1f-44f0-8b94-30b582506ed4/d3829b04-6d1f-44f0-8b94-30b582506ed4.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1825.183426] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a53f4e69-6284-4a3b-96ae-82d4b92019f2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.188806] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830cea93-db68-4c83-8e5c-f854ba25142d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.193776] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1825.193776] env[62508]: value = "task-1776683" [ 1825.193776] env[62508]: _type = "Task" [ 1825.193776] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.206122] env[62508]: DEBUG nova.compute.provider_tree [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1825.216152] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776683, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.222784] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1825.223051] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1825.223248] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Deleting the datastore file [datastore1] e7f521db-2dab-4c2c-bf2b-aa6e217f29bd {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1825.223520] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba1a5dcb-2b5a-45b9-b5fe-5c5010183503 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.231235] env[62508]: DEBUG oslo_vmware.api [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Waiting for the task: (returnval){ [ 1825.231235] env[62508]: value = "task-1776684" [ 1825.231235] env[62508]: _type = "Task" [ 1825.231235] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.239886] env[62508]: DEBUG oslo_vmware.api [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': task-1776684, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.266958] env[62508]: DEBUG nova.network.neutron [-] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1825.299814] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1825.300178] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ab2239b-bbe6-4274-9ec8-3098798c3b3a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.309123] env[62508]: DEBUG oslo_vmware.api [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1825.309123] env[62508]: value = "task-1776685" [ 1825.309123] env[62508]: _type = "Task" [ 1825.309123] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.319471] env[62508]: DEBUG oslo_vmware.api [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776685, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.607103] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776681, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497577} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.607518] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] cd2424b1-3842-4df4-8636-23417833ea49/cd2424b1-3842-4df4-8636-23417833ea49.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1825.607687] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1825.608022] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6076278b-cf90-4a64-b1f6-1e36669d5ff7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.614534] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1825.614534] env[62508]: value = "task-1776686" [ 1825.614534] env[62508]: _type = "Task" [ 1825.614534] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.622224] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776686, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.704415] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776683, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.709557] env[62508]: DEBUG nova.scheduler.client.report [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1825.743298] env[62508]: DEBUG oslo_vmware.api [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': task-1776684, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.771180] env[62508]: INFO nova.compute.manager [-] [instance: a15f3cef-c260-4a54-83af-7cccf81e15a6] Took 1.24 seconds to deallocate network for instance. [ 1825.822496] env[62508]: DEBUG oslo_vmware.api [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776685, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.998356] env[62508]: DEBUG oslo_concurrency.lockutils [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.998684] env[62508]: DEBUG oslo_concurrency.lockutils [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.998901] env[62508]: DEBUG oslo_concurrency.lockutils [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.999098] env[62508]: DEBUG oslo_concurrency.lockutils [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.999278] env[62508]: DEBUG oslo_concurrency.lockutils [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1826.001636] env[62508]: INFO nova.compute.manager [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Terminating instance [ 1826.003476] env[62508]: DEBUG nova.compute.manager [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Start destroying the instance on the 
hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1826.003667] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1826.004537] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88497387-c676-4212-b986-d4852ada79eb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.012172] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1826.012733] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff04ae2f-8936-49e5-b370-0df04361c970 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.019854] env[62508]: DEBUG oslo_vmware.api [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1826.019854] env[62508]: value = "task-1776687" [ 1826.019854] env[62508]: _type = "Task" [ 1826.019854] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.027828] env[62508]: DEBUG oslo_vmware.api [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776687, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.125757] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776686, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082528} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.126058] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1826.126828] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62794a58-9976-4b82-b009-8b93fb0548d6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.149498] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] cd2424b1-3842-4df4-8636-23417833ea49/cd2424b1-3842-4df4-8636-23417833ea49.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1826.149804] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc0ebbe2-ee63-4984-ab54-536446d0928d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.171641] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1826.171641] env[62508]: value = "task-1776688" [ 1826.171641] env[62508]: _type = "Task" [ 1826.171641] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.180626] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776688, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.205734] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776683, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.793205} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.205734] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] d3829b04-6d1f-44f0-8b94-30b582506ed4/d3829b04-6d1f-44f0-8b94-30b582506ed4.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1826.206035] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1826.206375] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-980c9b41-6caf-43e9-bda9-0d44aebc6af9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.214064] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1826.214064] env[62508]: value = "task-1776689" [ 1826.214064] env[62508]: _type = "Task" [ 1826.214064] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.230608] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776689, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.240922] env[62508]: DEBUG oslo_vmware.api [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Task: {'id': task-1776684, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.69446} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.241208] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1826.241421] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1826.241582] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1826.241773] env[62508]: INFO nova.compute.manager [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1826.242169] env[62508]: DEBUG oslo.service.loopingcall [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1826.242223] env[62508]: DEBUG nova.compute.manager [-] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1826.242321] env[62508]: DEBUG nova.network.neutron [-] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1826.324447] env[62508]: DEBUG oslo_vmware.api [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776685, 'name': PowerOnVM_Task, 'duration_secs': 0.659641} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.324793] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1826.325089] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fd715527-c3ba-4d4a-96dc-033f49d8359f tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Updating instance 'd3455694-a157-404f-8153-a9f96bac49a2' progress to 100 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1826.530564] env[62508]: DEBUG oslo_vmware.api [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776687, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.682125] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776688, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.732164] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.764s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1826.734991] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776689, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067061} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.737363] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1826.741415] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b118c93a-355a-4f3f-9e57-dcb830f1439a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.766316] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] d3829b04-6d1f-44f0-8b94-30b582506ed4/d3829b04-6d1f-44f0-8b94-30b582506ed4.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1826.767931] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d87a2882-f0b6-464b-bb5a-302e122dcb75 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.790401] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1826.790401] env[62508]: value = "task-1776690" [ 1826.790401] env[62508]: _type = "Task" [ 1826.790401] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.801089] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776690, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.802252] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e5e340fd-c7d0-4294-a0a4-7c76356af853 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "a15f3cef-c260-4a54-83af-7cccf81e15a6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.327s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.034911] env[62508]: DEBUG oslo_vmware.api [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776687, 'name': PowerOffVM_Task, 'duration_secs': 0.5419} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.035423] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1827.035772] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1827.036200] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a839441e-7188-4473-91c5-3bad84d26a23 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.136465] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1827.136712] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1827.136932] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Deleting the datastore file [datastore1] a8ce13c4-ea95-4343-8eab-8a0dafbf0e03 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1827.137212] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81214859-f415-4341-a48a-8e2abb11baed {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.143523] env[62508]: DEBUG oslo_vmware.api [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1827.143523] env[62508]: value = "task-1776692" [ 1827.143523] env[62508]: _type = "Task" [ 1827.143523] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.152657] env[62508]: DEBUG oslo_vmware.api [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776692, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.168679] env[62508]: DEBUG nova.compute.manager [req-4b7b856a-e301-4c18-947d-0683e23aa55c req-33ba5e5c-d8a1-415e-9c12-1fcc09446283 service nova] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Received event network-vif-deleted-792efee6-358d-45ae-b4ff-79e8fea4ff64 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1827.168878] env[62508]: INFO nova.compute.manager [req-4b7b856a-e301-4c18-947d-0683e23aa55c req-33ba5e5c-d8a1-415e-9c12-1fcc09446283 service nova] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Neutron deleted interface 792efee6-358d-45ae-b4ff-79e8fea4ff64; detaching it from the instance and deleting it from the info cache [ 1827.169084] env[62508]: DEBUG nova.network.neutron [req-4b7b856a-e301-4c18-947d-0683e23aa55c req-33ba5e5c-d8a1-415e-9c12-1fcc09446283 service nova] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1827.183560] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776688, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.306058] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776690, 'name': ReconfigVM_Task, 'duration_secs': 0.281231} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.306128] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Reconfigured VM instance instance-00000068 to attach disk [datastore1] d3829b04-6d1f-44f0-8b94-30b582506ed4/d3829b04-6d1f-44f0-8b94-30b582506ed4.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1827.306749] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b6a8e51a-b017-497b-95a9-05755cee316c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.314533] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1827.314533] env[62508]: value = "task-1776693" [ 1827.314533] env[62508]: _type = "Task" [ 1827.314533] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.324366] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776693, 'name': Rename_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.520481] env[62508]: DEBUG nova.network.neutron [-] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1827.654050] env[62508]: DEBUG oslo_vmware.api [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776692, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211128} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.654315] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1827.654501] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1827.654678] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1827.654854] env[62508]: INFO nova.compute.manager [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1827.655117] env[62508]: DEBUG oslo.service.loopingcall [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1827.655310] env[62508]: DEBUG nova.compute.manager [-] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1827.655437] env[62508]: DEBUG nova.network.neutron [-] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1827.671233] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f98e0c24-ca1b-42ce-a4ea-648231d5c618 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.681290] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45be4f2b-232c-4ce8-9e70-8b0ce8a8d924 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.696577] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776688, 'name': ReconfigVM_Task, 'duration_secs': 1.134984} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.697437] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Reconfigured VM instance instance-00000067 to attach disk [datastore1] cd2424b1-3842-4df4-8636-23417833ea49/cd2424b1-3842-4df4-8636-23417833ea49.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1827.698137] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-37146ba0-3821-4ca5-95ad-e4682a8fb7cb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.703905] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1827.703905] env[62508]: value = "task-1776694" [ 1827.703905] env[62508]: _type = "Task" [ 1827.703905] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.717019] env[62508]: DEBUG nova.compute.manager [req-4b7b856a-e301-4c18-947d-0683e23aa55c req-33ba5e5c-d8a1-415e-9c12-1fcc09446283 service nova] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Detach interface failed, port_id=792efee6-358d-45ae-b4ff-79e8fea4ff64, reason: Instance e7f521db-2dab-4c2c-bf2b-aa6e217f29bd could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1827.720361] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776694, 'name': Rename_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.824489] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776693, 'name': Rename_Task, 'duration_secs': 0.285354} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.824774] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1827.825032] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-798dfede-20f1-4efa-9820-7227f3f70b1e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.831446] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1827.831446] env[62508]: value = "task-1776695" [ 1827.831446] env[62508]: _type = "Task" [ 1827.831446] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.839075] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776695, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.024098] env[62508]: INFO nova.compute.manager [-] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Took 1.78 seconds to deallocate network for instance. [ 1828.083716] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "a6002275-d684-4609-9935-95180cff36d8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.083980] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "a6002275-d684-4609-9935-95180cff36d8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.215159] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776694, 'name': Rename_Task, 'duration_secs': 0.287449} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.215455] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1828.215740] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eca3b4c5-de5c-4461-90c9-16b07818284d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.222207] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1828.222207] env[62508]: value = "task-1776696" [ 1828.222207] env[62508]: _type = "Task" [ 1828.222207] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.230377] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776696, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.283600] env[62508]: INFO nova.compute.manager [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Swapping old allocation on dict_keys(['5d5b4923-a8ac-4688-9f86-2405bd3406a9']) held by migration 175bc17b-cf18-4307-8ec2-2cf47b9564d9 for instance [ 1828.314384] env[62508]: DEBUG nova.scheduler.client.report [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Overwriting current allocation {'allocations': {'5d5b4923-a8ac-4688-9f86-2405bd3406a9': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 164}}, 'project_id': 'ce113e91e2b74136a8050ed3acf3557c', 'user_id': '712ef76e285f48e6b5e8f75aa2fee850', 'consumer_generation': 1} on consumer 2aeb5a4c-785a-4238-8575-ecd1ff84b97c {{(pid=62508) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1828.343997] env[62508]: DEBUG oslo_vmware.api [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776695, 'name': PowerOnVM_Task, 'duration_secs': 0.468817} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.344315] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1828.344554] env[62508]: INFO nova.compute.manager [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Took 11.27 seconds to spawn the instance on the hypervisor. [ 1828.344683] env[62508]: DEBUG nova.compute.manager [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1828.345537] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3d3829-db27-4f62-a781-32e681247559 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.422456] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1828.422687] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1828.422940] env[62508]: DEBUG nova.network.neutron [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1828.530851] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.531092] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.531327] env[62508]: DEBUG nova.objects.instance [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 
tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Lazy-loading 'resources' on Instance uuid e7f521db-2dab-4c2c-bf2b-aa6e217f29bd {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1828.586044] env[62508]: DEBUG nova.network.neutron [-] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1828.588182] env[62508]: DEBUG nova.compute.manager [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1828.733597] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776696, 'name': PowerOnVM_Task} progress is 76%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.863935] env[62508]: INFO nova.compute.manager [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Took 20.75 seconds to build instance. [ 1829.057696] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "d3455694-a157-404f-8153-a9f96bac49a2" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.057942] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "d3455694-a157-404f-8153-a9f96bac49a2" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.058162] env[62508]: DEBUG nova.compute.manager [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Going to confirm migration 8 {{(pid=62508) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1829.092230] env[62508]: INFO nova.compute.manager [-] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Took 1.44 seconds to deallocate network for instance. 
[ 1829.111410] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.207147] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df12eaa-9ea2-4e33-a833-ad342168b16c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.215426] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9a85f3-008b-47cd-8a56-c813a402884e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.252230] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0a3b65-6016-4c01-8588-80278beccfac {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.255754] env[62508]: DEBUG nova.compute.manager [req-a1efd454-7562-4d7d-87e7-1cf6a27b153a req-e6795d6d-a863-4f1c-a597-2756ca9feb6d service nova] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Received event network-vif-deleted-a51ee93a-fba9-4802-9791-4c16f273346e {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1829.265096] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a726968b-c7be-43a6-be45-3ff0b112494f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.269292] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776696, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.282170] env[62508]: DEBUG nova.compute.provider_tree [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1829.368196] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c7fbd069-98a3-49d1-9b61-fac14705057c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.263s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.370043] env[62508]: DEBUG nova.network.neutron [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updating instance_info_cache with network_info: [{"id": "87ce4777-2520-4432-a1ed-03e189684761", "address": "fa:16:3e:8b:d2:e9", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87ce4777-25", "ovs_interfaceid": "87ce4777-2520-4432-a1ed-03e189684761", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1829.594364] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "refresh_cache-d3455694-a157-404f-8153-a9f96bac49a2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1829.594555] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquired lock "refresh_cache-d3455694-a157-404f-8153-a9f96bac49a2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1829.594738] env[62508]: DEBUG nova.network.neutron [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 
tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1829.594927] env[62508]: DEBUG nova.objects.instance [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lazy-loading 'info_cache' on Instance uuid d3455694-a157-404f-8153-a9f96bac49a2 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1829.607720] env[62508]: DEBUG oslo_concurrency.lockutils [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.756800] env[62508]: DEBUG oslo_vmware.api [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776696, 'name': PowerOnVM_Task, 'duration_secs': 1.144536} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.757097] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1829.757318] env[62508]: INFO nova.compute.manager [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Took 14.99 seconds to spawn the instance on the hypervisor. 
[ 1829.757497] env[62508]: DEBUG nova.compute.manager [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1829.758302] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615352c2-eccf-4862-854b-9c06a5ba55f5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.787116] env[62508]: DEBUG nova.scheduler.client.report [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1829.872793] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "refresh_cache-2aeb5a4c-785a-4238-8575-ecd1ff84b97c" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1829.873830] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b550b900-9825-4217-b148-103815af99d7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.880544] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-defa134e-0df5-4534-9762-ac5a4986445e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.276514] env[62508]: INFO nova.compute.manager [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Took 23.87 seconds to build instance. 
[ 1830.296290] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.765s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1830.299171] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.188s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.300916] env[62508]: INFO nova.compute.claims [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1830.320357] env[62508]: INFO nova.scheduler.client.report [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Deleted allocations for instance e7f521db-2dab-4c2c-bf2b-aa6e217f29bd [ 1830.778634] env[62508]: DEBUG oslo_concurrency.lockutils [None req-362abbfa-2970-4989-910e-b050307e35f4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "cd2424b1-3842-4df4-8636-23417833ea49" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.376s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1830.807968] env[62508]: DEBUG nova.network.neutron [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Updating instance_info_cache with network_info: [{"id": "ed5b1d50-d456-43d1-887a-96dcb4f42cec", "address": "fa:16:3e:68:ac:c2", "network": {"id": "cb7330fa-d489-4cd9-9416-e91de8e96ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1048162462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74c45615efbb425fbec8400f6d225892", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped5b1d50-d4", "ovs_interfaceid": "ed5b1d50-d456-43d1-887a-96dcb4f42cec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1830.828086] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bbcbac70-cfd5-4242-b736-1b1d5a2439a0 tempest-ServersV294TestFqdnHostnames-1143331300 tempest-ServersV294TestFqdnHostnames-1143331300-project-member] Lock "e7f521db-2dab-4c2c-bf2b-aa6e217f29bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.239s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1830.965957] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1830.966338] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7cb7df6-7db5-47aa-93d0-b17c1e37ded9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.974956] env[62508]: DEBUG oslo_vmware.api [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1830.974956] env[62508]: value = "task-1776697" [ 1830.974956] env[62508]: _type = "Task" [ 1830.974956] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.983063] env[62508]: DEBUG oslo_vmware.api [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776697, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.031982] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "interface-0a4958d5-b9a9-4854-90ca-f19eb34cb15b-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1831.032285] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "interface-0a4958d5-b9a9-4854-90ca-f19eb34cb15b-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.032657] env[62508]: DEBUG nova.objects.instance [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lazy-loading 'flavor' on Instance uuid 0a4958d5-b9a9-4854-90ca-f19eb34cb15b {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1831.297702] env[62508]: DEBUG nova.compute.manager [req-a19edfe6-7079-4638-aacb-9e261f5eee8b req-652e7dba-3272-466e-a448-3f9314441e54 service nova] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Received event network-changed-cfd14da4-5054-4b3f-bb35-eeefcb6843a9 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1831.297936] env[62508]: DEBUG nova.compute.manager [req-a19edfe6-7079-4638-aacb-9e261f5eee8b req-652e7dba-3272-466e-a448-3f9314441e54 service nova] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Refreshing instance network info cache due to event network-changed-cfd14da4-5054-4b3f-bb35-eeefcb6843a9. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1831.298509] env[62508]: DEBUG oslo_concurrency.lockutils [req-a19edfe6-7079-4638-aacb-9e261f5eee8b req-652e7dba-3272-466e-a448-3f9314441e54 service nova] Acquiring lock "refresh_cache-d3829b04-6d1f-44f0-8b94-30b582506ed4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1831.298686] env[62508]: DEBUG oslo_concurrency.lockutils [req-a19edfe6-7079-4638-aacb-9e261f5eee8b req-652e7dba-3272-466e-a448-3f9314441e54 service nova] Acquired lock "refresh_cache-d3829b04-6d1f-44f0-8b94-30b582506ed4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1831.298896] env[62508]: DEBUG nova.network.neutron [req-a19edfe6-7079-4638-aacb-9e261f5eee8b req-652e7dba-3272-466e-a448-3f9314441e54 service nova] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Refreshing network info cache for port cfd14da4-5054-4b3f-bb35-eeefcb6843a9 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1831.314168] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Releasing lock "refresh_cache-d3455694-a157-404f-8153-a9f96bac49a2" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1831.314638] env[62508]: DEBUG nova.objects.instance [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lazy-loading 'migration_context' on Instance uuid d3455694-a157-404f-8153-a9f96bac49a2 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1831.477070] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd730477-7955-4d62-b6eb-59596e573ed3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.490405] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5930dc0-c4fa-4d52-be66-b7f83030822b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.495062] env[62508]: DEBUG oslo_vmware.api [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776697, 'name': PowerOffVM_Task, 'duration_secs': 0.330643} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.495325] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1831.495969] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1831.496185] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1831.496334] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1831.496508] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1831.496658] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1831.496850] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1831.496986] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1831.497150] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 
tempest-ServerActionsTestOtherB-1159041240-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1831.497310] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1831.497468] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1831.497632] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1831.503185] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05e283d2-bc7c-43d5-9115-661376002035 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.541894] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba80b86-adbf-4271-8f90-d9d14da26142 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.547566] env[62508]: DEBUG oslo_vmware.api [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1831.547566] env[62508]: value = "task-1776698" [ 1831.547566] env[62508]: _type = "Task" [ 1831.547566] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.553136] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c271302-6585-4058-bb09-f3501ac0a18e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.561597] env[62508]: DEBUG oslo_vmware.api [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776698, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.569586] env[62508]: DEBUG nova.compute.provider_tree [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1831.645867] env[62508]: DEBUG nova.objects.instance [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lazy-loading 'pci_requests' on Instance uuid 0a4958d5-b9a9-4854-90ca-f19eb34cb15b {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1831.817753] env[62508]: DEBUG nova.objects.base [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1831.818710] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ace519e-fe1b-439a-9425-125c6633f036 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.840046] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0909cc9e-9343-4320-8da2-b170385cd386 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.845853] env[62508]: DEBUG oslo_vmware.api [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1831.845853] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528b4c31-fbab-9a9a-d055-1680b785924b" [ 1831.845853] env[62508]: _type = "Task" [ 1831.845853] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.853836] env[62508]: DEBUG oslo_vmware.api [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528b4c31-fbab-9a9a-d055-1680b785924b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.060641] env[62508]: DEBUG oslo_vmware.api [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776698, 'name': ReconfigVM_Task, 'duration_secs': 0.180079} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.061543] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99eac7b-a86b-49d7-83e4-3919f82ace22 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.074029] env[62508]: DEBUG nova.scheduler.client.report [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1832.096102] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1832.096373] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1832.096532] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1832.096719] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1832.096870] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1832.097040] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1832.097590] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1832.097800] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1832.097974] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1832.098158] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1832.098336] env[62508]: DEBUG nova.virt.hardware [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1832.103970] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6ab9d6a-2315-4ad3-a4dd-e4af25955852 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.110281] env[62508]: DEBUG oslo_vmware.api [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1832.110281] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520fa301-c612-6ab9-9c62-2afb66f2bf2b" [ 1832.110281] env[62508]: _type = "Task" [ 1832.110281] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.119505] env[62508]: DEBUG oslo_vmware.api [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520fa301-c612-6ab9-9c62-2afb66f2bf2b, 'name': SearchDatastore_Task, 'duration_secs': 0.007121} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.128501] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Reconfiguring VM instance instance-0000005b to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1832.129134] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15697c9f-86d9-4565-89b7-4f25eac08927 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.148935] env[62508]: DEBUG nova.objects.base [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Object Instance<0a4958d5-b9a9-4854-90ca-f19eb34cb15b> lazy-loaded attributes: flavor,pci_requests {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1832.148935] env[62508]: DEBUG nova.network.neutron [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1832.152549] env[62508]: DEBUG oslo_vmware.api [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1832.152549] env[62508]: value = "task-1776699" [ 1832.152549] env[62508]: _type = "Task" [ 1832.152549] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.161064] env[62508]: DEBUG oslo_vmware.api [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776699, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.210476] env[62508]: DEBUG nova.network.neutron [req-a19edfe6-7079-4638-aacb-9e261f5eee8b req-652e7dba-3272-466e-a448-3f9314441e54 service nova] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Updated VIF entry in instance network info cache for port cfd14da4-5054-4b3f-bb35-eeefcb6843a9. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1832.211441] env[62508]: DEBUG nova.network.neutron [req-a19edfe6-7079-4638-aacb-9e261f5eee8b req-652e7dba-3272-466e-a448-3f9314441e54 service nova] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Updating instance_info_cache with network_info: [{"id": "cfd14da4-5054-4b3f-bb35-eeefcb6843a9", "address": "fa:16:3e:7a:3e:02", "network": {"id": "121c7907-9028-4be7-9d23-48e5c34ec429", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-954413717-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e682eb7bbd240afb2f6581c7478b99c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0d7a2b2f-3b49-4dc8-9096-af16144b27a9", "external-id": "nsx-vlan-transportzone-492", "segmentation_id": 492, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcfd14da4-50", "ovs_interfaceid": "cfd14da4-5054-4b3f-bb35-eeefcb6843a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1832.238213] env[62508]: DEBUG nova.policy [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c1793957cc840d58a1b6f1f9b38b96b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b46df14344794f29a8b0c00408d18159', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1832.316189] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "42f06dc8-e5f6-475e-ba42-15b4abc3139a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.316448] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "42f06dc8-e5f6-475e-ba42-15b4abc3139a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.357792] env[62508]: DEBUG oslo_vmware.api [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: 
{'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528b4c31-fbab-9a9a-d055-1680b785924b, 'name': SearchDatastore_Task, 'duration_secs': 0.009364} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.358260] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.600901] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.302s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1832.601413] env[62508]: DEBUG nova.compute.manager [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1832.605025] env[62508]: DEBUG oslo_concurrency.lockutils [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.997s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.605025] env[62508]: DEBUG nova.objects.instance [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lazy-loading 'resources' on Instance uuid a8ce13c4-ea95-4343-8eab-8a0dafbf0e03 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1832.662744] env[62508]: DEBUG oslo_vmware.api [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776699, 'name': ReconfigVM_Task, 'duration_secs': 0.276026} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.663019] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Reconfigured VM instance instance-0000005b to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1832.664050] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0e5b58-7f45-4c40-9672-034fa9b604b0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.690516] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 2aeb5a4c-785a-4238-8575-ecd1ff84b97c/2aeb5a4c-785a-4238-8575-ecd1ff84b97c.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1832.690867] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbe402c5-fed5-41e8-911c-f09b693629b1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.712319] env[62508]: DEBUG oslo_vmware.api [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1832.712319] env[62508]: value = "task-1776700" [ 1832.712319] env[62508]: _type = "Task" [ 1832.712319] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.715792] env[62508]: DEBUG oslo_concurrency.lockutils [req-a19edfe6-7079-4638-aacb-9e261f5eee8b req-652e7dba-3272-466e-a448-3f9314441e54 service nova] Releasing lock "refresh_cache-d3829b04-6d1f-44f0-8b94-30b582506ed4" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1832.721804] env[62508]: DEBUG oslo_vmware.api [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776700, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.819194] env[62508]: DEBUG nova.compute.manager [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1833.657915] env[62508]: DEBUG nova.compute.utils [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1833.664353] env[62508]: DEBUG nova.compute.manager [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1833.664529] env[62508]: DEBUG nova.network.neutron [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1833.675133] env[62508]: DEBUG nova.compute.manager [req-750af11f-776b-4d94-9b04-632a4a4a0f5d req-089df898-67a4-48e0-8f27-b5233b2df5dc service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Received event network-vif-plugged-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1833.675133] env[62508]: DEBUG oslo_concurrency.lockutils [req-750af11f-776b-4d94-9b04-632a4a4a0f5d req-089df898-67a4-48e0-8f27-b5233b2df5dc service nova] Acquiring lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.675133] env[62508]: DEBUG oslo_concurrency.lockutils [req-750af11f-776b-4d94-9b04-632a4a4a0f5d req-089df898-67a4-48e0-8f27-b5233b2df5dc service nova] Lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.675133] env[62508]: DEBUG oslo_concurrency.lockutils [req-750af11f-776b-4d94-9b04-632a4a4a0f5d req-089df898-67a4-48e0-8f27-b5233b2df5dc service nova] Lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.675133] env[62508]: DEBUG nova.compute.manager [req-750af11f-776b-4d94-9b04-632a4a4a0f5d req-089df898-67a4-48e0-8f27-b5233b2df5dc service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] No waiting events found dispatching network-vif-plugged-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1833.675133] env[62508]: WARNING nova.compute.manager [req-750af11f-776b-4d94-9b04-632a4a4a0f5d req-089df898-67a4-48e0-8f27-b5233b2df5dc service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Received unexpected event network-vif-plugged-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b for instance with vm_state active and task_state None. 
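Editor's note: the PowerOffVM_Task / ReconfigVM_Task / PowerOnVM_Task records around here all follow the same invoke-then-poll pattern: the driver starts a vSphere task through the oslo.vmware session, then polls it until completion, producing the "progress is N%" and "completed successfully" lines. A minimal sketch of that pattern using the public oslo.vmware API follows; the vCenter host, credentials, and the simplified VM lookup are placeholders, not values from this deployment.

    # Sketch of the invoke-then-poll pattern behind the PowerOnVM_Task entries.
    # Host, credentials and the VM lookup below are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        "vcenter.example.org", "user", "secret",
        api_retry_count=10, task_poll_interval=0.5)

    # Retrieve VirtualMachine managed object references (simplified lookup:
    # just take the first result instead of matching a specific instance).
    vms = session.invoke_api(vim_util, "get_objects", session.vim,
                             "VirtualMachine", 100)
    vm_ref = vms.objects[0].obj

    # Start the vSphere task, then block while wait_for_task polls its state,
    # which is what produces the "progress is N%" / "completed successfully"
    # lines in the log above.
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    task_info = session.wait_for_task(task)
    print(task_info.state)
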
[ 1833.678377] env[62508]: DEBUG oslo_vmware.api [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776700, 'name': ReconfigVM_Task, 'duration_secs': 0.693889} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.678649] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 2aeb5a4c-785a-4238-8575-ecd1ff84b97c/2aeb5a4c-785a-4238-8575-ecd1ff84b97c.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1833.679514] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59485ea6-4878-41b4-855c-348e4aa798b5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.685093] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.703678] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645a404a-713c-4fe8-98d1-e8858bfb70b5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.727828] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f40a2b3-4bca-4c30-927e-0cec279ad7b6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.732191] env[62508]: DEBUG nova.policy [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '466fd3a805b24749b134fe7977a5ac86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e90ec7156574be597a12f4fa0e8c1dc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1833.751924] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fecea896-8d73-432f-bbe9-aaef146439e7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.758825] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1833.759077] env[62508]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-037992c6-5cc8-437b-8c05-d3de2dc8324f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.768045] env[62508]: DEBUG oslo_vmware.api [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1833.768045] env[62508]: value = "task-1776701" [ 1833.768045] env[62508]: _type = "Task" [ 1833.768045] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.775579] env[62508]: DEBUG oslo_vmware.api [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776701, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.843377] env[62508]: DEBUG nova.network.neutron [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Successfully updated port: 7f4a8a8e-266d-408a-b48e-2a6ecb65e47b {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1833.891102] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05739735-0556-4cf4-94b1-ed79a153a086 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.901123] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c1a9c1-1664-4f9a-804c-b60f80f2c376 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.931280] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9ddb168-ceef-4e71-afc4-2c76dad47ef3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.938742] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df700ecd-78ce-45fc-bfe7-e769857b4f82 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.952348] env[62508]: DEBUG nova.compute.provider_tree [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1834.042579] env[62508]: DEBUG nova.network.neutron [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Successfully created port: 121e891d-bfe4-4624-a343-95db7afe5757 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1834.167810] env[62508]: DEBUG nova.compute.manager [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 
a6002275-d684-4609-9935-95180cff36d8] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1834.278695] env[62508]: DEBUG oslo_vmware.api [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776701, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.350356] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1834.350555] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1834.350775] env[62508]: DEBUG nova.network.neutron [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1834.455923] env[62508]: DEBUG nova.scheduler.client.report [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1834.777804] env[62508]: DEBUG oslo_vmware.api [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776701, 'name': PowerOnVM_Task, 'duration_secs': 0.70295} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.778789] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1834.886077] env[62508]: WARNING nova.network.neutron [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] 8e80b270-1a6d-45fe-9a8b-355e686378f1 already exists in list: networks containing: ['8e80b270-1a6d-45fe-9a8b-355e686378f1']. ignoring it [ 1834.961159] env[62508]: DEBUG oslo_concurrency.lockutils [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.357s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.966028] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.607s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.990903] env[62508]: INFO nova.scheduler.client.report [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Deleted allocations for instance a8ce13c4-ea95-4343-8eab-8a0dafbf0e03 [ 1835.168362] env[62508]: DEBUG nova.network.neutron [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Updating instance_info_cache with network_info: [{"id": "ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4", "address": "fa:16:3e:8b:f5:37", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea63968d-e3", "ovs_interfaceid": "ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}, {"id": "7f4a8a8e-266d-408a-b48e-2a6ecb65e47b", "address": "fa:16:3e:c1:89:65", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f4a8a8e-26", "ovs_interfaceid": "7f4a8a8e-266d-408a-b48e-2a6ecb65e47b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1835.185218] env[62508]: DEBUG nova.compute.manager [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1835.214145] env[62508]: DEBUG nova.virt.hardware [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1835.214398] env[62508]: DEBUG nova.virt.hardware [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1835.214562] env[62508]: DEBUG nova.virt.hardware [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1835.214732] env[62508]: DEBUG nova.virt.hardware [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 
tempest-DeleteServersTestJSON-66528752-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1835.214878] env[62508]: DEBUG nova.virt.hardware [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1835.215119] env[62508]: DEBUG nova.virt.hardware [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1835.215369] env[62508]: DEBUG nova.virt.hardware [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1835.215537] env[62508]: DEBUG nova.virt.hardware [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1835.215702] env[62508]: DEBUG nova.virt.hardware [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1835.215873] env[62508]: DEBUG nova.virt.hardware [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1835.216056] env[62508]: DEBUG nova.virt.hardware [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1835.216978] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980968c6-5b3a-4dc0-8aa4-a550b5a5d1a8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.225910] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb68fb87-f94f-4d44-baa5-996aa7991ff2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.499673] env[62508]: DEBUG oslo_concurrency.lockutils [None req-87aba254-bf68-4d84-a7fc-557380c5c9d3 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "a8ce13c4-ea95-4343-8eab-8a0dafbf0e03" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.501s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1835.549757] env[62508]: DEBUG nova.network.neutron [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Successfully updated port: 121e891d-bfe4-4624-a343-95db7afe5757 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1835.651427] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e3d0c3-ec4b-459e-8e65-95b4a6fc6a25 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.659579] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0a7190-050c-4265-9c4c-4f471380bf87 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.699335] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1835.699335] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1835.699335] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1835.699335] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a59c56-a14b-408f-a500-da0618eef266 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.702123] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9906f042-52b5-4ce1-a737-7cec7733faaa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.722795] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68449720-2e96-4952-b78f-0bdc70025343 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.728267] env[62508]: DEBUG nova.virt.hardware [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1835.728505] env[62508]: DEBUG nova.virt.hardware [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1835.728687] env[62508]: DEBUG nova.virt.hardware [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1835.729093] env[62508]: DEBUG nova.virt.hardware [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1835.729257] env[62508]: DEBUG nova.virt.hardware [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1835.729409] env[62508]: DEBUG nova.virt.hardware [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1835.729778] env[62508]: DEBUG nova.virt.hardware [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1835.729987] env[62508]: DEBUG nova.virt.hardware [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1835.730180] env[62508]: DEBUG nova.virt.hardware [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1835.730346] env[62508]: DEBUG nova.virt.hardware [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1835.730516] env[62508]: DEBUG nova.virt.hardware [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 
tempest-AttachInterfacesTestJSON-1910392284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1835.738668] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Reconfiguring VM to attach interface {{(pid=62508) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1835.738668] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7d440ed-5d93-4023-b3d2-1e0dc1bf0db0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.754896] env[62508]: DEBUG nova.compute.manager [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Received event network-changed-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1835.755067] env[62508]: DEBUG nova.compute.manager [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Refreshing instance network info cache due to event network-changed-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1835.755291] env[62508]: DEBUG oslo_concurrency.lockutils [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] Acquiring lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1835.755432] env[62508]: DEBUG oslo_concurrency.lockutils [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] Acquired lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1835.755940] env[62508]: DEBUG nova.network.neutron [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Refreshing network info cache for port 7f4a8a8e-266d-408a-b48e-2a6ecb65e47b {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1835.767262] env[62508]: DEBUG nova.compute.provider_tree [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1835.772195] env[62508]: DEBUG oslo_vmware.api [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1835.772195] env[62508]: value = "task-1776702" [ 1835.772195] env[62508]: _type = "Task" [ 1835.772195] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.782661] env[62508]: DEBUG oslo_vmware.api [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776702, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.831131] env[62508]: INFO nova.compute.manager [None req-cc250ff3-1747-4867-a5a6-a3c3c9b66e47 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updating instance to original state: 'active' [ 1836.055512] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "refresh_cache-a6002275-d684-4609-9935-95180cff36d8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1836.056265] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "refresh_cache-a6002275-d684-4609-9935-95180cff36d8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1836.056265] env[62508]: DEBUG nova.network.neutron [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1836.272528] env[62508]: DEBUG nova.scheduler.client.report [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1836.286070] env[62508]: DEBUG oslo_vmware.api [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776702, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.794023] env[62508]: DEBUG oslo_vmware.api [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776702, 'name': ReconfigVM_Task, 'duration_secs': 0.813541} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.794023] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1836.794023] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Reconfigured VM to attach interface {{(pid=62508) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1836.827943] env[62508]: DEBUG nova.network.neutron [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1836.859888] env[62508]: DEBUG nova.network.neutron [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Updated VIF entry in instance network info cache for port 7f4a8a8e-266d-408a-b48e-2a6ecb65e47b. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1836.860188] env[62508]: DEBUG nova.network.neutron [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Updating instance_info_cache with network_info: [{"id": "ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4", "address": "fa:16:3e:8b:f5:37", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea63968d-e3", "ovs_interfaceid": "ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7f4a8a8e-266d-408a-b48e-2a6ecb65e47b", "address": "fa:16:3e:c1:89:65", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f4a8a8e-26", "ovs_interfaceid": "7f4a8a8e-266d-408a-b48e-2a6ecb65e47b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1837.068593] env[62508]: DEBUG nova.network.neutron [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Updating instance_info_cache with network_info: [{"id": "121e891d-bfe4-4624-a343-95db7afe5757", "address": "fa:16:3e:45:1c:a0", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap121e891d-bf", "ovs_interfaceid": "121e891d-bfe4-4624-a343-95db7afe5757", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1837.289265] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.322s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.290857] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.606s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1837.292801] env[62508]: INFO nova.compute.claims [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1837.298454] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3e5bb91a-1c92-4014-93f8-347ca707ea2b tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "interface-0a4958d5-b9a9-4854-90ca-f19eb34cb15b-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.265s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.366403] env[62508]: DEBUG oslo_concurrency.lockutils [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] Releasing lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.366837] env[62508]: DEBUG nova.compute.manager [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] [instance: a6002275-d684-4609-9935-95180cff36d8] Received event network-vif-plugged-121e891d-bfe4-4624-a343-95db7afe5757 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1837.367175] env[62508]: DEBUG oslo_concurrency.lockutils [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] Acquiring lock "a6002275-d684-4609-9935-95180cff36d8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1837.367481] env[62508]: DEBUG oslo_concurrency.lockutils [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] Lock "a6002275-d684-4609-9935-95180cff36d8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1837.367753] env[62508]: DEBUG oslo_concurrency.lockutils [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] Lock "a6002275-d684-4609-9935-95180cff36d8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.367987] env[62508]: DEBUG nova.compute.manager [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] [instance: a6002275-d684-4609-9935-95180cff36d8] No waiting events found dispatching network-vif-plugged-121e891d-bfe4-4624-a343-95db7afe5757 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1837.368220] env[62508]: WARNING nova.compute.manager [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] [instance: a6002275-d684-4609-9935-95180cff36d8] Received unexpected event network-vif-plugged-121e891d-bfe4-4624-a343-95db7afe5757 for instance with vm_state building and task_state spawning. 
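The "Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data" records in this section show, per resource class, the totals, reservations and allocation ratios that the resource tracker reports to Placement before claims such as the one for instance 42f06dc8-e5f6-475e-ba42-15b4abc3139a succeed. As a rough, hedged illustration of what those numbers mean for scheduling (this is not Nova or Placement source code; the formula (total - reserved) * allocation_ratio reflects Placement's documented capacity calculation, taken here as an assumption), the following minimal Python sketch recomputes usable capacity from the inventory dict copied verbatim from this log:

    # Sketch only: derive schedulable capacity per resource class from the
    # inventory data logged for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9.
    # Values are copied from the log above; the capacity formula is assumed to
    # match Placement's (total - reserved) * allocation_ratio behaviour.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(f"{rc}: {capacity} schedulable units")

    # Expected output for the values above:
    #   VCPU: 192 schedulable units
    #   MEMORY_MB: 196078 schedulable units
    #   DISK_GB: 400 schedulable units

With the 4.0 VCPU allocation ratio shown here, the 48 physical vCPUs are overcommitted to 192 schedulable vCPUs, which is why the m1.nano claims in this log succeed without contention.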
[ 1837.368403] env[62508]: DEBUG nova.compute.manager [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] [instance: a6002275-d684-4609-9935-95180cff36d8] Received event network-changed-121e891d-bfe4-4624-a343-95db7afe5757 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1837.368646] env[62508]: DEBUG nova.compute.manager [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] [instance: a6002275-d684-4609-9935-95180cff36d8] Refreshing instance network info cache due to event network-changed-121e891d-bfe4-4624-a343-95db7afe5757. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1837.368741] env[62508]: DEBUG oslo_concurrency.lockutils [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] Acquiring lock "refresh_cache-a6002275-d684-4609-9935-95180cff36d8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1837.574697] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "refresh_cache-a6002275-d684-4609-9935-95180cff36d8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.574697] env[62508]: DEBUG nova.compute.manager [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Instance network_info: |[{"id": "121e891d-bfe4-4624-a343-95db7afe5757", "address": "fa:16:3e:45:1c:a0", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap121e891d-bf", "ovs_interfaceid": "121e891d-bfe4-4624-a343-95db7afe5757", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1837.574697] env[62508]: DEBUG oslo_concurrency.lockutils [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] Acquired lock "refresh_cache-a6002275-d684-4609-9935-95180cff36d8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1837.574697] env[62508]: DEBUG nova.network.neutron [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] [instance: a6002275-d684-4609-9935-95180cff36d8] Refreshing network info cache for port 
121e891d-bfe4-4624-a343-95db7afe5757 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1837.578432] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:1c:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '121e891d-bfe4-4624-a343-95db7afe5757', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1837.584418] env[62508]: DEBUG oslo.service.loopingcall [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1837.585648] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6002275-d684-4609-9935-95180cff36d8] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1837.585966] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3a1fca44-77b1-487c-ace1-a5d897b3548d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.610789] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1837.610789] env[62508]: value = "task-1776703" [ 1837.610789] env[62508]: _type = "Task" [ 1837.610789] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.622542] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776703, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.646515] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1837.646515] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1837.646515] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1837.646515] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1837.646515] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.649356] env[62508]: INFO nova.compute.manager [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Terminating instance [ 1837.652232] env[62508]: DEBUG nova.compute.manager [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1837.652515] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1837.652952] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f493983-0b3f-44db-aacc-c74522eb839b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.661403] env[62508]: DEBUG oslo_vmware.api [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1837.661403] env[62508]: value = "task-1776704" [ 1837.661403] env[62508]: _type = "Task" [ 1837.661403] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.671591] env[62508]: DEBUG oslo_vmware.api [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776704, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.860279] env[62508]: INFO nova.scheduler.client.report [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Deleted allocation for migration db69c3cd-b958-40bd-b854-dd7203af037b [ 1838.124694] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776703, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.171535] env[62508]: DEBUG oslo_vmware.api [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776704, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.370085] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e98cdaad-1a85-4190-9601-d7c5a5f6c414 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "d3455694-a157-404f-8153-a9f96bac49a2" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.311s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1838.418581] env[62508]: DEBUG nova.network.neutron [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] [instance: a6002275-d684-4609-9935-95180cff36d8] Updated VIF entry in instance network info cache for port 121e891d-bfe4-4624-a343-95db7afe5757. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1838.418745] env[62508]: DEBUG nova.network.neutron [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] [instance: a6002275-d684-4609-9935-95180cff36d8] Updating instance_info_cache with network_info: [{"id": "121e891d-bfe4-4624-a343-95db7afe5757", "address": "fa:16:3e:45:1c:a0", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap121e891d-bf", "ovs_interfaceid": "121e891d-bfe4-4624-a343-95db7afe5757", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1838.448024] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1838.448024] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1838.533042] env[62508]: DEBUG nova.compute.manager [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Stashing vm_state: active {{(pid=62508) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1838.564958] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f09a010-dc40-468b-8790-c183dafe83f7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.573988] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c110564c-f04e-4953-a3b6-10be6731274b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.583954] env[62508]: INFO nova.compute.manager [None 
req-f93769c1-e25d-4da3-a72c-651ac50c1cc1 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Get console output [ 1838.583954] env[62508]: WARNING nova.virt.vmwareapi.driver [None req-f93769c1-e25d-4da3-a72c-651ac50c1cc1 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] The console log is missing. Check your VSPC configuration [ 1838.617148] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7726db-a71f-49d5-9a01-613434b1ecd5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.628102] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250e77eb-713f-4491-8dee-916dde636d25 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.631678] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776703, 'name': CreateVM_Task, 'duration_secs': 0.579753} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.632333] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6002275-d684-4609-9935-95180cff36d8] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1838.633285] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1838.633435] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1838.633742] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1838.633989] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5c8a28f-42af-4716-bdf8-83639e1af75d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.643570] env[62508]: DEBUG nova.compute.provider_tree [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1838.649184] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 
tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1838.649184] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521f8325-8ca0-a134-0bf6-d35a07f9548f" [ 1838.649184] env[62508]: _type = "Task" [ 1838.649184] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.660194] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521f8325-8ca0-a134-0bf6-d35a07f9548f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.669650] env[62508]: DEBUG oslo_vmware.api [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776704, 'name': PowerOffVM_Task, 'duration_secs': 0.544949} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.669904] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1838.670181] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Volume detach. 
Driver type: vmdk {{(pid=62508) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1838.670393] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368813', 'volume_id': '486f8980-1071-4c4e-aa7e-b41e69850aa9', 'name': 'volume-486f8980-1071-4c4e-aa7e-b41e69850aa9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '2aeb5a4c-785a-4238-8575-ecd1ff84b97c', 'attached_at': '2024-12-11T22:18:08.000000', 'detached_at': '', 'volume_id': '486f8980-1071-4c4e-aa7e-b41e69850aa9', 'serial': '486f8980-1071-4c4e-aa7e-b41e69850aa9'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1838.671225] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e38d5206-2fd8-4788-a781-a22e1c8be5cf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.692414] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a7815a-66ad-4502-92f5-483b2e015c08 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.699648] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f57a2f99-4b46-4ff3-9751-f53d90904ec0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.719696] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed561ad-d17f-486d-a85a-95b99b3e0fde {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.736694] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] The volume has not been displaced from its original location: [datastore1] volume-486f8980-1071-4c4e-aa7e-b41e69850aa9/volume-486f8980-1071-4c4e-aa7e-b41e69850aa9.vmdk. No consolidation needed. 
{{(pid=62508) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1838.742111] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Reconfiguring VM instance instance-0000005b to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1838.742738] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0dccdcf-42c4-4ed4-9c57-eda31fe99281 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.761193] env[62508]: DEBUG oslo_vmware.api [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1838.761193] env[62508]: value = "task-1776705" [ 1838.761193] env[62508]: _type = "Task" [ 1838.761193] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.769385] env[62508]: DEBUG oslo_vmware.api [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776705, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.879525] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "interface-0a4958d5-b9a9-4854-90ca-f19eb34cb15b-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1838.879798] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "interface-0a4958d5-b9a9-4854-90ca-f19eb34cb15b-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1838.922569] env[62508]: DEBUG oslo_concurrency.lockutils [req-82ea9f65-a5db-49c7-a619-4dd1b4c58efb req-b7bebdf1-9c8b-4467-b3ea-83779623e720 service nova] Releasing lock "refresh_cache-a6002275-d684-4609-9935-95180cff36d8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1838.950389] env[62508]: DEBUG nova.compute.manager [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1839.049464] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.146397] env[62508]: DEBUG nova.scheduler.client.report [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1839.159409] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521f8325-8ca0-a134-0bf6-d35a07f9548f, 'name': SearchDatastore_Task, 'duration_secs': 0.010993} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.159791] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1839.160195] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1839.160344] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1839.160545] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1839.160795] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 
tempest-DeleteServersTestJSON-66528752-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1839.161103] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-520e86d9-8518-4041-a35e-8083c0531a48 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.170376] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1839.170606] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1839.171374] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab6ff5bb-6006-49a6-9303-3a3e13c9ad84 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.176796] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1839.176796] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5241e435-4db5-9402-d4e3-7edebfe56f33" [ 1839.176796] env[62508]: _type = "Task" [ 1839.176796] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.184253] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5241e435-4db5-9402-d4e3-7edebfe56f33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.270657] env[62508]: DEBUG oslo_vmware.api [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776705, 'name': ReconfigVM_Task, 'duration_secs': 0.217564} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.270997] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Reconfigured VM instance instance-0000005b to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1839.275721] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90b9b7a1-0233-45e3-aa52-d7962d2401a5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.291527] env[62508]: DEBUG oslo_vmware.api [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1839.291527] env[62508]: value = "task-1776706" [ 1839.291527] env[62508]: _type = "Task" [ 1839.291527] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.299902] env[62508]: DEBUG oslo_vmware.api [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776706, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.382816] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1839.383078] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1839.384094] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b74c4931-4cf9-4a9f-b8bf-a743f6b53a03 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.402098] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be56616-7c55-43d7-b879-b8ad5c1adc58 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.428322] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Reconfiguring VM to detach interface {{(pid=62508) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1839.428938] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1e3d1ff-ecc2-47dd-a760-7c0e1f798978 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1839.449712] env[62508]: DEBUG oslo_vmware.api [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1839.449712] env[62508]: value = "task-1776707" [ 1839.449712] env[62508]: _type = "Task" [ 1839.449712] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.461276] env[62508]: DEBUG oslo_vmware.api [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776707, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.476970] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.653787] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.363s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.654357] env[62508]: DEBUG nova.compute.manager [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1839.657062] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.608s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.686839] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5241e435-4db5-9402-d4e3-7edebfe56f33, 'name': SearchDatastore_Task, 'duration_secs': 0.008507} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.687951] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7311e611-0ecd-4186-b8f8-119f877493b9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.692724] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1839.692724] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52315146-6166-261f-a059-13b937538633" [ 1839.692724] env[62508]: _type = "Task" [ 1839.692724] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.701567] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52315146-6166-261f-a059-13b937538633, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.801342] env[62508]: DEBUG oslo_vmware.api [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776706, 'name': ReconfigVM_Task, 'duration_secs': 0.18077} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.801622] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368813', 'volume_id': '486f8980-1071-4c4e-aa7e-b41e69850aa9', 'name': 'volume-486f8980-1071-4c4e-aa7e-b41e69850aa9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '2aeb5a4c-785a-4238-8575-ecd1ff84b97c', 'attached_at': '2024-12-11T22:18:08.000000', 'detached_at': '', 'volume_id': '486f8980-1071-4c4e-aa7e-b41e69850aa9', 'serial': '486f8980-1071-4c4e-aa7e-b41e69850aa9'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1839.801926] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1839.802672] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c02376-0d7e-4fd1-ab99-169fa34872fd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.808706] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Unregistering the VM {{(pid=62508) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1839.808929] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6adbac4-cc4a-4ab3-b43d-46e23c835c22 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.947262] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1839.947478] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1839.947661] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Deleting the datastore file [datastore1] 2aeb5a4c-785a-4238-8575-ecd1ff84b97c {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1839.947930] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ac8300f-ebba-4b92-a13e-41af223d9d07 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.959762] env[62508]: DEBUG oslo_vmware.api [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776707, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.960966] env[62508]: DEBUG oslo_vmware.api [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1839.960966] env[62508]: value = "task-1776709" [ 1839.960966] env[62508]: _type = "Task" [ 1839.960966] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.968168] env[62508]: DEBUG oslo_vmware.api [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776709, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.159972] env[62508]: DEBUG nova.compute.utils [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1840.164046] env[62508]: INFO nova.compute.claims [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1840.166940] env[62508]: DEBUG nova.compute.manager [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1840.167134] env[62508]: DEBUG nova.network.neutron [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1840.203733] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52315146-6166-261f-a059-13b937538633, 'name': SearchDatastore_Task, 'duration_secs': 0.009192} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.203987] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1840.204263] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a6002275-d684-4609-9935-95180cff36d8/a6002275-d684-4609-9935-95180cff36d8.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1840.204520] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de0e8e3a-77d8-4aee-9538-6d71f4f0c0f1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.210744] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1840.210744] env[62508]: value = "task-1776710" [ 1840.210744] env[62508]: _type = "Task" [ 1840.210744] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.215057] env[62508]: DEBUG nova.policy [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81273f5ad53746e2bc89a7f2f7b7a727', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86e6f83751b0446fb8f00684082f018a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1840.220793] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776710, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.463837] env[62508]: DEBUG oslo_vmware.api [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776707, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.471711] env[62508]: DEBUG oslo_vmware.api [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776709, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173231} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.472081] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1840.472206] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1840.472390] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1840.472569] env[62508]: INFO nova.compute.manager [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Took 2.82 seconds to destroy the instance on the hypervisor. 
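[editor's note] The destroy sequence above (ReconfigVM_Task to detach the disk, UnregisterVM, DeleteDatastoreFile_Task) follows the same rhythm throughout this log: the driver submits a vCenter task, then wait_for_task repeatedly polls it ("progress is N%") until the poller reports "completed successfully". Below is a minimal, illustrative sketch of that poll-until-complete loop under stated assumptions; the names poll_fn and TaskTimeout are hypothetical and this is not the oslo.vmware implementation.

import time


class TaskTimeout(Exception):
    """Raised when the task does not finish within the allowed time."""


def wait_for_task(poll_fn, interval=0.5, timeout=60.0):
    """Call poll_fn() until it reports success or error.

    poll_fn is assumed to return a (state, progress) tuple, where state is
    one of 'running', 'success', or 'error'.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = poll_fn()
        print(f"progress is {progress}%")   # mirrors the _poll_task entries above
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)                # back off before polling again
    raise TaskTimeout("task did not complete in time")


if __name__ == "__main__":
    # Stub task that finishes on the third poll.
    states = iter([("running", 0), ("running", 50), ("success", 100)])
    wait_for_task(lambda: next(states))

[end editor's note]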
[ 1840.472816] env[62508]: DEBUG oslo.service.loopingcall [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1840.473028] env[62508]: DEBUG nova.compute.manager [-] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1840.473125] env[62508]: DEBUG nova.network.neutron [-] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1840.667760] env[62508]: DEBUG nova.compute.manager [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1840.675865] env[62508]: INFO nova.compute.resource_tracker [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updating resource usage from migration 80926dc1-cf00-4cb3-84ef-5ec854d14f1f [ 1840.681260] env[62508]: DEBUG nova.network.neutron [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Successfully created port: 72d5d39c-6144-4a83-b087-58b9ba4ffc16 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1840.723660] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776710, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457132} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.726326] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a6002275-d684-4609-9935-95180cff36d8/a6002275-d684-4609-9935-95180cff36d8.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1840.726834] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1840.726988] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c6e123a2-3811-4490-8bb5-b129646e3f00 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.733539] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1840.733539] env[62508]: value = "task-1776711" [ 1840.733539] env[62508]: _type = "Task" [ 1840.733539] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.744162] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776711, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.877084] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d8010f-f74d-4fae-bcf4-d7bcc2f50ae3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.885100] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76fa2800-bb54-4f34-95a4-1921db7494d2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.914395] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8618e9b3-0247-4917-b558-95c732cd9b6c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.921813] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-173e7b37-a16f-49c7-ab4e-aee756e17bcc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.934580] env[62508]: DEBUG nova.compute.provider_tree [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1840.960007] env[62508]: DEBUG oslo_vmware.api [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776707, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.029736] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1841.029850] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1841.250317] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776711, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059978} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.250605] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1841.251546] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c161e84b-6539-4605-93be-8d070afd9e8c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.276437] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] a6002275-d684-4609-9935-95180cff36d8/a6002275-d684-4609-9935-95180cff36d8.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1841.278163] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-084d0749-58c9-44c5-a435-7311c8d1b451 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.294550] env[62508]: DEBUG nova.compute.manager [req-4decbf43-ded1-411a-9b55-1434155bd69f req-18a85e0f-b676-4bff-a826-e4745376b639 service nova] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Received event network-vif-deleted-87ce4777-2520-4432-a1ed-03e189684761 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1841.294550] env[62508]: INFO nova.compute.manager [req-4decbf43-ded1-411a-9b55-1434155bd69f req-18a85e0f-b676-4bff-a826-e4745376b639 service nova] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Neutron deleted interface 87ce4777-2520-4432-a1ed-03e189684761; detaching it from the instance and deleting it from the info cache [ 1841.294729] env[62508]: DEBUG nova.network.neutron [req-4decbf43-ded1-411a-9b55-1434155bd69f req-18a85e0f-b676-4bff-a826-e4745376b639 service nova] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1841.303162] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1841.303162] env[62508]: value = "task-1776712" [ 1841.303162] env[62508]: _type = "Task" [ 1841.303162] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.314780] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776712, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.441147] env[62508]: DEBUG nova.scheduler.client.report [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1841.462555] env[62508]: DEBUG oslo_vmware.api [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776707, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.535993] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1841.536184] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1841.658996] env[62508]: DEBUG nova.network.neutron [-] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1841.684499] env[62508]: DEBUG nova.compute.manager [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1841.711842] env[62508]: DEBUG nova.virt.hardware [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1841.712153] env[62508]: DEBUG nova.virt.hardware [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1841.712318] env[62508]: DEBUG nova.virt.hardware [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1841.712505] env[62508]: DEBUG nova.virt.hardware [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1841.712892] env[62508]: DEBUG nova.virt.hardware [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1841.712892] env[62508]: DEBUG nova.virt.hardware [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1841.712987] env[62508]: DEBUG nova.virt.hardware [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1841.713196] env[62508]: DEBUG nova.virt.hardware [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1841.713367] env[62508]: DEBUG nova.virt.hardware [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 
tempest-ServersTestJSON-1349594885-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1841.713567] env[62508]: DEBUG nova.virt.hardware [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1841.713699] env[62508]: DEBUG nova.virt.hardware [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1841.714683] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef778df-44f7-4c0e-b10d-21dc7c372d0f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.722451] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe988bb-5b99-49fc-b37d-ae768b195bc8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.797540] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca652704-ab5e-4a52-b8bc-9b48efb4b749 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.810781] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-918d77be-6cc6-43fb-bd7b-5caa82bfcd66 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.828428] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776712, 'name': ReconfigVM_Task, 'duration_secs': 0.349965} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.828623] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Reconfigured VM instance instance-00000069 to attach disk [datastore1] a6002275-d684-4609-9935-95180cff36d8/a6002275-d684-4609-9935-95180cff36d8.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1841.829267] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2a1d38ac-970b-4c94-864f-b4282c2c0e31 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.835234] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1841.835234] env[62508]: value = "task-1776713" [ 1841.835234] env[62508]: _type = "Task" [ 1841.835234] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.847584] env[62508]: DEBUG nova.compute.manager [req-4decbf43-ded1-411a-9b55-1434155bd69f req-18a85e0f-b676-4bff-a826-e4745376b639 service nova] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Detach interface failed, port_id=87ce4777-2520-4432-a1ed-03e189684761, reason: Instance 2aeb5a4c-785a-4238-8575-ecd1ff84b97c could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1841.852913] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776713, 'name': Rename_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.946768] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.289s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.947022] env[62508]: INFO nova.compute.manager [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Migrating [ 1841.954272] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.477s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.955794] env[62508]: INFO nova.compute.claims [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1841.974196] env[62508]: DEBUG oslo_vmware.api [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776707, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.161978] env[62508]: INFO nova.compute.manager [-] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Took 1.69 seconds to deallocate network for instance. [ 1842.346440] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776713, 'name': Rename_Task, 'duration_secs': 0.150083} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.346874] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1842.346955] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e62425e3-a52e-4f5b-8cc3-45627a3e60ef {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.357622] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1842.357622] env[62508]: value = "task-1776714" [ 1842.357622] env[62508]: _type = "Task" [ 1842.357622] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.367149] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776714, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.378475] env[62508]: DEBUG nova.network.neutron [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Successfully updated port: 72d5d39c-6144-4a83-b087-58b9ba4ffc16 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1842.469493] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1842.469730] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1842.469959] env[62508]: DEBUG nova.network.neutron [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1842.474031] env[62508]: DEBUG oslo_vmware.api [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776707, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.706358] env[62508]: INFO nova.compute.manager [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Took 0.54 seconds to detach 1 volumes for instance. [ 1842.868842] env[62508]: DEBUG oslo_vmware.api [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776714, 'name': PowerOnVM_Task, 'duration_secs': 0.426131} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.869167] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1842.869369] env[62508]: INFO nova.compute.manager [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Took 7.68 seconds to spawn the instance on the hypervisor. [ 1842.869542] env[62508]: DEBUG nova.compute.manager [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1842.870393] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73129286-e191-424e-9608-30569d1e0202 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.881061] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "refresh_cache-42f06dc8-e5f6-475e-ba42-15b4abc3139a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1842.881194] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "refresh_cache-42f06dc8-e5f6-475e-ba42-15b4abc3139a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1842.881336] env[62508]: DEBUG nova.network.neutron [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1842.969210] env[62508]: DEBUG oslo_vmware.api [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776707, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.151211] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71347dbf-2dc2-4f8f-bf1f-43cf7ec99197 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.159217] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8edff3e-0232-41dd-8dc9-8a9ce74aeeef {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.191925] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f08481-b4f8-4566-847e-564593c2db92 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.200423] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8536eef7-611c-49ca-8690-0aa7cb858e3b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.214758] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1843.215286] env[62508]: DEBUG nova.compute.provider_tree [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1843.228560] env[62508]: DEBUG nova.network.neutron [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updating instance_info_cache with network_info: [{"id": "e0bb9906-0d2a-4bdb-bbe4-5a3074c66499", "address": "fa:16:3e:50:39:a4", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0bb9906-0d", "ovs_interfaceid": "e0bb9906-0d2a-4bdb-bbe4-5a3074c66499", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] 
{{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.298792] env[62508]: DEBUG nova.compute.manager [req-b515d727-62c1-47d7-bac3-c123175bbe65 req-0cef1f02-da7c-4b0e-9484-2806bc9b0027 service nova] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Received event network-vif-plugged-72d5d39c-6144-4a83-b087-58b9ba4ffc16 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1843.299023] env[62508]: DEBUG oslo_concurrency.lockutils [req-b515d727-62c1-47d7-bac3-c123175bbe65 req-0cef1f02-da7c-4b0e-9484-2806bc9b0027 service nova] Acquiring lock "42f06dc8-e5f6-475e-ba42-15b4abc3139a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1843.299232] env[62508]: DEBUG oslo_concurrency.lockutils [req-b515d727-62c1-47d7-bac3-c123175bbe65 req-0cef1f02-da7c-4b0e-9484-2806bc9b0027 service nova] Lock "42f06dc8-e5f6-475e-ba42-15b4abc3139a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.299401] env[62508]: DEBUG oslo_concurrency.lockutils [req-b515d727-62c1-47d7-bac3-c123175bbe65 req-0cef1f02-da7c-4b0e-9484-2806bc9b0027 service nova] Lock "42f06dc8-e5f6-475e-ba42-15b4abc3139a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.299563] env[62508]: DEBUG nova.compute.manager [req-b515d727-62c1-47d7-bac3-c123175bbe65 req-0cef1f02-da7c-4b0e-9484-2806bc9b0027 service nova] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] No waiting events found dispatching network-vif-plugged-72d5d39c-6144-4a83-b087-58b9ba4ffc16 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1843.299721] env[62508]: WARNING nova.compute.manager [req-b515d727-62c1-47d7-bac3-c123175bbe65 req-0cef1f02-da7c-4b0e-9484-2806bc9b0027 service nova] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Received unexpected event network-vif-plugged-72d5d39c-6144-4a83-b087-58b9ba4ffc16 for instance with vm_state building and task_state spawning. [ 1843.299877] env[62508]: DEBUG nova.compute.manager [req-b515d727-62c1-47d7-bac3-c123175bbe65 req-0cef1f02-da7c-4b0e-9484-2806bc9b0027 service nova] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Received event network-changed-72d5d39c-6144-4a83-b087-58b9ba4ffc16 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1843.300039] env[62508]: DEBUG nova.compute.manager [req-b515d727-62c1-47d7-bac3-c123175bbe65 req-0cef1f02-da7c-4b0e-9484-2806bc9b0027 service nova] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Refreshing instance network info cache due to event network-changed-72d5d39c-6144-4a83-b087-58b9ba4ffc16. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1843.300204] env[62508]: DEBUG oslo_concurrency.lockutils [req-b515d727-62c1-47d7-bac3-c123175bbe65 req-0cef1f02-da7c-4b0e-9484-2806bc9b0027 service nova] Acquiring lock "refresh_cache-42f06dc8-e5f6-475e-ba42-15b4abc3139a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.389470] env[62508]: INFO nova.compute.manager [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Took 14.30 seconds to build instance. [ 1843.435466] env[62508]: DEBUG nova.network.neutron [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1843.469909] env[62508]: DEBUG oslo_vmware.api [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776707, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.654600] env[62508]: DEBUG nova.network.neutron [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Updating instance_info_cache with network_info: [{"id": "72d5d39c-6144-4a83-b087-58b9ba4ffc16", "address": "fa:16:3e:70:44:20", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72d5d39c-61", "ovs_interfaceid": "72d5d39c-6144-4a83-b087-58b9ba4ffc16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.718958] env[62508]: DEBUG nova.scheduler.client.report [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1843.731444] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1843.891304] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc146d21-3780-4eb8-8a16-f7af54fd8f09 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "a6002275-d684-4609-9935-95180cff36d8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.807s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.970708] env[62508]: DEBUG oslo_vmware.api [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776707, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.157776] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "refresh_cache-42f06dc8-e5f6-475e-ba42-15b4abc3139a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1844.157776] env[62508]: DEBUG nova.compute.manager [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Instance network_info: |[{"id": "72d5d39c-6144-4a83-b087-58b9ba4ffc16", "address": "fa:16:3e:70:44:20", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72d5d39c-61", "ovs_interfaceid": "72d5d39c-6144-4a83-b087-58b9ba4ffc16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1844.158025] env[62508]: DEBUG oslo_concurrency.lockutils [req-b515d727-62c1-47d7-bac3-c123175bbe65 req-0cef1f02-da7c-4b0e-9484-2806bc9b0027 service nova] Acquired lock "refresh_cache-42f06dc8-e5f6-475e-ba42-15b4abc3139a" {{(pid=62508) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1844.158025] env[62508]: DEBUG nova.network.neutron [req-b515d727-62c1-47d7-bac3-c123175bbe65 req-0cef1f02-da7c-4b0e-9484-2806bc9b0027 service nova] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Refreshing network info cache for port 72d5d39c-6144-4a83-b087-58b9ba4ffc16 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1844.160033] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:44:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec46b14d-3310-4f2b-96c1-f53ee47d3759', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '72d5d39c-6144-4a83-b087-58b9ba4ffc16', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1844.167313] env[62508]: DEBUG oslo.service.loopingcall [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1844.170749] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1844.171259] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e14013fd-f9fe-48c1-aa5b-2225e59e113b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.192040] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1844.192040] env[62508]: value = "task-1776715" [ 1844.192040] env[62508]: _type = "Task" [ 1844.192040] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.200742] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776715, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.224130] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.270s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.224625] env[62508]: DEBUG nova.compute.manager [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1844.227261] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.013s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.227489] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.251660] env[62508]: INFO nova.scheduler.client.report [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Deleted allocations for instance 2aeb5a4c-785a-4238-8575-ecd1ff84b97c [ 1844.384302] env[62508]: DEBUG nova.network.neutron [req-b515d727-62c1-47d7-bac3-c123175bbe65 req-0cef1f02-da7c-4b0e-9484-2806bc9b0027 service nova] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Updated VIF entry in instance network info cache for port 72d5d39c-6144-4a83-b087-58b9ba4ffc16. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1844.384805] env[62508]: DEBUG nova.network.neutron [req-b515d727-62c1-47d7-bac3-c123175bbe65 req-0cef1f02-da7c-4b0e-9484-2806bc9b0027 service nova] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Updating instance_info_cache with network_info: [{"id": "72d5d39c-6144-4a83-b087-58b9ba4ffc16", "address": "fa:16:3e:70:44:20", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72d5d39c-61", "ovs_interfaceid": "72d5d39c-6144-4a83-b087-58b9ba4ffc16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1844.401058] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6709ec3c-0748-43f7-87bd-458d18add255 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "a6002275-d684-4609-9935-95180cff36d8" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1844.401350] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6709ec3c-0748-43f7-87bd-458d18add255 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "a6002275-d684-4609-9935-95180cff36d8" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.401535] env[62508]: DEBUG nova.compute.manager [None req-6709ec3c-0748-43f7-87bd-458d18add255 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1844.402575] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18adb88e-e857-407b-9f6c-7d19b56ca292 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.410359] env[62508]: DEBUG nova.compute.manager [None req-6709ec3c-0748-43f7-87bd-458d18add255 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62508) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1844.410988] env[62508]: DEBUG nova.objects.instance [None req-6709ec3c-0748-43f7-87bd-458d18add255 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lazy-loading 'flavor' on Instance uuid a6002275-d684-4609-9935-95180cff36d8 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1844.473611] env[62508]: DEBUG oslo_vmware.api [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776707, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.703580] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776715, 'name': CreateVM_Task, 'duration_secs': 0.339531} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.703753] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1844.704450] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1844.704624] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1844.704945] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1844.705210] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4d903b2-ab10-473d-aecd-6829cecc08a6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.709658] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1844.709658] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529499be-b9ed-5b44-e5e5-14d5bf20b6ba" [ 1844.709658] env[62508]: _type = "Task" [ 1844.709658] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.717254] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529499be-b9ed-5b44-e5e5-14d5bf20b6ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.731632] env[62508]: DEBUG nova.compute.utils [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1844.732949] env[62508]: DEBUG nova.compute.manager [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1844.733132] env[62508]: DEBUG nova.network.neutron [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1844.763279] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2a3acfb7-c9d4-4045-8601-82b4c0f5936a tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "2aeb5a4c-785a-4238-8575-ecd1ff84b97c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.117s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.772728] env[62508]: DEBUG nova.policy [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05df8f0c7c7c4d8e8e3dcc1646f7a56c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86a46b5a43dd41e48816a8d86e3685b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1844.888297] env[62508]: DEBUG oslo_concurrency.lockutils [req-b515d727-62c1-47d7-bac3-c123175bbe65 req-0cef1f02-da7c-4b0e-9484-2806bc9b0027 service nova] Releasing lock "refresh_cache-42f06dc8-e5f6-475e-ba42-15b4abc3139a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1844.916304] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6709ec3c-0748-43f7-87bd-458d18add255 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1844.916717] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-20bae265-b815-4923-9558-04cc21014e6a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.925036] env[62508]: DEBUG oslo_vmware.api [None req-6709ec3c-0748-43f7-87bd-458d18add255 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1844.925036] env[62508]: value = "task-1776716" [ 1844.925036] env[62508]: _type = "Task" [ 1844.925036] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.933768] env[62508]: DEBUG oslo_vmware.api [None req-6709ec3c-0748-43f7-87bd-458d18add255 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776716, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.973118] env[62508]: DEBUG oslo_vmware.api [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776707, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.056071] env[62508]: DEBUG nova.network.neutron [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Successfully created port: 60434e32-b866-43d2-8a33-8925c4459e29 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1845.220713] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529499be-b9ed-5b44-e5e5-14d5bf20b6ba, 'name': SearchDatastore_Task, 'duration_secs': 0.010755} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.221255] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1845.221621] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1845.221971] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1845.222352] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1845.222669] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1845.223125] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1981b3a-57ac-43a1-b6a9-a56331c4e085 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.233464] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1845.233654] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1845.234434] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b08d3057-46f1-4f98-b8d1-a8ec5de72aa1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.237390] env[62508]: DEBUG nova.compute.manager [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1845.249035] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1845.249035] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52cb102f-b7f4-df83-66e0-1e7c88102822" [ 1845.249035] env[62508]: _type = "Task" [ 1845.249035] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.249035] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1aadc1f-f031-400f-8159-1a66a441cbe7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.278103] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updating instance '544d165c-5054-4c57-a5d9-ac69046c6fbc' progress to 0 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1845.284610] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52cb102f-b7f4-df83-66e0-1e7c88102822, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.434600] env[62508]: DEBUG oslo_vmware.api [None req-6709ec3c-0748-43f7-87bd-458d18add255 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776716, 'name': PowerOffVM_Task, 'duration_secs': 0.184846} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.434893] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6709ec3c-0748-43f7-87bd-458d18add255 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1845.435071] env[62508]: DEBUG nova.compute.manager [None req-6709ec3c-0748-43f7-87bd-458d18add255 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1845.435957] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48ed9cc-1079-42d4-b7a7-49466db27549 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.473292] env[62508]: DEBUG oslo_vmware.api [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776707, 'name': ReconfigVM_Task, 'duration_secs': 5.843518} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.473564] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1845.473774] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Reconfigured VM to detach interface {{(pid=62508) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1845.757210] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52cb102f-b7f4-df83-66e0-1e7c88102822, 'name': SearchDatastore_Task, 'duration_secs': 0.014621} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.758019] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83380fb8-bd66-4d8b-a7a9-6bb65a270f1b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.763180] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1845.763180] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a64351-7eaa-baef-a074-3be3c0dfd8d6" [ 1845.763180] env[62508]: _type = "Task" [ 1845.763180] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.771090] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a64351-7eaa-baef-a074-3be3c0dfd8d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.784285] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1845.784846] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3ce0d374-66fb-4280-b31b-c6e368d2f834 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.792932] env[62508]: DEBUG oslo_vmware.api [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1845.792932] env[62508]: value = "task-1776717" [ 1845.792932] env[62508]: _type = "Task" [ 1845.792932] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.802341] env[62508]: DEBUG oslo_vmware.api [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776717, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.947812] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6709ec3c-0748-43f7-87bd-458d18add255 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "a6002275-d684-4609-9935-95180cff36d8" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.546s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1846.187867] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "d58f5593-aafc-43e0-a040-96af10659b70" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.188181] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "d58f5593-aafc-43e0-a040-96af10659b70" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.252872] env[62508]: DEBUG nova.compute.manager [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1846.273966] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52a64351-7eaa-baef-a074-3be3c0dfd8d6, 'name': SearchDatastore_Task, 'duration_secs': 0.033533} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.276097] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1846.276383] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 42f06dc8-e5f6-475e-ba42-15b4abc3139a/42f06dc8-e5f6-475e-ba42-15b4abc3139a.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1846.276868] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ddd0a9ab-0ba1-4197-849a-a3f134a5ff9d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.282879] env[62508]: DEBUG nova.virt.hardware [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1846.283126] env[62508]: DEBUG nova.virt.hardware [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1846.283291] env[62508]: DEBUG nova.virt.hardware [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1846.283471] env[62508]: DEBUG nova.virt.hardware [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1846.283614] env[62508]: DEBUG nova.virt.hardware [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 
tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1846.283757] env[62508]: DEBUG nova.virt.hardware [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1846.283954] env[62508]: DEBUG nova.virt.hardware [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1846.284138] env[62508]: DEBUG nova.virt.hardware [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1846.284308] env[62508]: DEBUG nova.virt.hardware [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1846.284469] env[62508]: DEBUG nova.virt.hardware [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1846.284640] env[62508]: DEBUG nova.virt.hardware [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1846.285410] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879ae948-d719-491e-a250-63f1745261d9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.289319] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1846.289319] env[62508]: value = "task-1776718" [ 1846.289319] env[62508]: _type = "Task" [ 1846.289319] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.299258] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096a4725-e5ab-448d-88af-48b4bd56e51a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.308349] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776718, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.313816] env[62508]: DEBUG oslo_vmware.api [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776717, 'name': PowerOffVM_Task, 'duration_secs': 0.171145} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.322136] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1846.322351] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updating instance '544d165c-5054-4c57-a5d9-ac69046c6fbc' progress to 17 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1846.542162] env[62508]: DEBUG nova.compute.manager [req-f106e131-f3be-4a1c-a5a8-1d158ca1e65e req-795f5b37-6dde-48ac-a07c-143bc419c137 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Received event network-vif-plugged-60434e32-b866-43d2-8a33-8925c4459e29 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1846.542508] env[62508]: DEBUG oslo_concurrency.lockutils [req-f106e131-f3be-4a1c-a5a8-1d158ca1e65e req-795f5b37-6dde-48ac-a07c-143bc419c137 service nova] Acquiring lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.542705] env[62508]: DEBUG oslo_concurrency.lockutils [req-f106e131-f3be-4a1c-a5a8-1d158ca1e65e req-795f5b37-6dde-48ac-a07c-143bc419c137 service nova] Lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.542798] env[62508]: DEBUG oslo_concurrency.lockutils [req-f106e131-f3be-4a1c-a5a8-1d158ca1e65e req-795f5b37-6dde-48ac-a07c-143bc419c137 service nova] Lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1846.542961] env[62508]: DEBUG nova.compute.manager 
[req-f106e131-f3be-4a1c-a5a8-1d158ca1e65e req-795f5b37-6dde-48ac-a07c-143bc419c137 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] No waiting events found dispatching network-vif-plugged-60434e32-b866-43d2-8a33-8925c4459e29 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1846.543446] env[62508]: WARNING nova.compute.manager [req-f106e131-f3be-4a1c-a5a8-1d158ca1e65e req-795f5b37-6dde-48ac-a07c-143bc419c137 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Received unexpected event network-vif-plugged-60434e32-b866-43d2-8a33-8925c4459e29 for instance with vm_state building and task_state spawning. [ 1846.655906] env[62508]: DEBUG nova.network.neutron [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Successfully updated port: 60434e32-b866-43d2-8a33-8925c4459e29 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1846.690227] env[62508]: DEBUG nova.compute.manager [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1846.800986] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776718, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474137} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.801294] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 42f06dc8-e5f6-475e-ba42-15b4abc3139a/42f06dc8-e5f6-475e-ba42-15b4abc3139a.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1846.801506] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1846.801762] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fd697908-f16a-4b71-a624-258428b1de34 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.808774] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1846.808774] env[62508]: value = "task-1776719" [ 1846.808774] env[62508]: _type = "Task" [ 1846.808774] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.809236] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "a6002275-d684-4609-9935-95180cff36d8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.809548] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "a6002275-d684-4609-9935-95180cff36d8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.809768] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "a6002275-d684-4609-9935-95180cff36d8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.809964] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "a6002275-d684-4609-9935-95180cff36d8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.810156] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "a6002275-d684-4609-9935-95180cff36d8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1846.812183] env[62508]: INFO nova.compute.manager [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Terminating instance [ 1846.814033] env[62508]: DEBUG nova.compute.manager [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1846.814267] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1846.819033] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de6ba53-d84f-4f36-acd6-319066ec8cee {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.828492] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1846.828721] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1846.828880] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1846.829088] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1846.829246] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1846.829394] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1846.829595] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1846.829756] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1846.829918] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1846.830089] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1846.830269] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1846.835534] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1846.838105] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d239e3f-719b-4122-86f2-f4aa58361227 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.848518] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c3dab46-77cd-484b-825f-bd9316936067 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.850171] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776719, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.856822] env[62508]: DEBUG oslo_vmware.api [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1846.856822] env[62508]: value = "task-1776720" [ 1846.856822] env[62508]: _type = "Task" [ 1846.856822] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.867571] env[62508]: DEBUG oslo_vmware.api [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776720, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.903574] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1846.903859] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1846.904098] env[62508]: DEBUG nova.network.neutron [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1846.929030] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1846.930152] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1846.930152] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Deleting the datastore file [datastore1] a6002275-d684-4609-9935-95180cff36d8 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1846.930152] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-04574bd9-66c1-4b3a-993e-12a7fa5f5e66 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.936661] env[62508]: DEBUG oslo_vmware.api [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1846.936661] env[62508]: value = "task-1776722" [ 1846.936661] env[62508]: _type = "Task" [ 1846.936661] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.946639] env[62508]: DEBUG oslo_vmware.api [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776722, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.159748] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1847.159960] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquired lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1847.160110] env[62508]: DEBUG nova.network.neutron [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1847.214349] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.214605] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.216627] env[62508]: INFO nova.compute.claims [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1847.319334] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776719, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069752} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.319634] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1847.320360] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f6c809-8f8c-4e85-87b8-0b27ae0cf42f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.344366] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 42f06dc8-e5f6-475e-ba42-15b4abc3139a/42f06dc8-e5f6-475e-ba42-15b4abc3139a.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1847.344687] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05e7b241-9479-4dea-9366-3b21e27f825a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.368781] env[62508]: DEBUG oslo_vmware.api [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776720, 'name': ReconfigVM_Task, 'duration_secs': 0.23462} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.370241] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updating instance '544d165c-5054-4c57-a5d9-ac69046c6fbc' progress to 33 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1847.373636] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1847.373636] env[62508]: value = "task-1776723" [ 1847.373636] env[62508]: _type = "Task" [ 1847.373636] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.381702] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776723, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.449282] env[62508]: DEBUG oslo_vmware.api [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776722, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160115} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.449641] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1847.449886] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1847.450132] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1847.450372] env[62508]: INFO nova.compute.manager [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: a6002275-d684-4609-9935-95180cff36d8] Took 0.64 seconds to destroy the instance on the hypervisor. [ 1847.450877] env[62508]: DEBUG oslo.service.loopingcall [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1847.451282] env[62508]: DEBUG nova.compute.manager [-] [instance: a6002275-d684-4609-9935-95180cff36d8] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1847.451415] env[62508]: DEBUG nova.network.neutron [-] [instance: a6002275-d684-4609-9935-95180cff36d8] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1847.691670] env[62508]: DEBUG nova.network.neutron [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1847.822927] env[62508]: DEBUG nova.network.neutron [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Updating instance_info_cache with network_info: [{"id": "60434e32-b866-43d2-8a33-8925c4459e29", "address": "fa:16:3e:3f:a7:2d", "network": {"id": "7fdcf35b-d562-4926-a8b1-15143df837c1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-791265259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86a46b5a43dd41e48816a8d86e3685b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60434e32-b8", "ovs_interfaceid": "60434e32-b866-43d2-8a33-8925c4459e29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1847.876767] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1847.877014] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1847.877188] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1847.877374] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1847.877522] env[62508]: DEBUG nova.virt.hardware 
[None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1847.877667] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1847.877868] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1847.878191] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1847.878438] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1847.878612] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1847.878790] env[62508]: DEBUG nova.virt.hardware [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1847.884517] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Reconfiguring VM instance instance-00000064 to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1847.890386] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab1c3f13-0913-429f-a63a-61aeab375730 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.910872] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776723, 'name': ReconfigVM_Task, 'duration_secs': 0.277253} completed successfully. 
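The nova.virt.hardware entries above walk through CPU-topology selection for the 1-vCPU m1.nano flavor: with no flavor or image preferences (0:0:0) the maxima default to 65536 per dimension, and the only sockets x cores x threads combination that yields 1 vCPU is 1:1:1. A simplified sketch of that enumeration, under the assumption that only exact factorizations of the vCPU count within the maxima are kept (the real algorithm in nova.virt.hardware handles more constraints):

```python
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate (sockets, cores, threads) triples whose product equals
    # the vCPU count and which stay within the per-dimension maxima.
    topologies = []
    for sockets, cores, threads in product(range(1, min(vcpus, max_sockets) + 1),
                                           range(1, min(vcpus, max_cores) + 1),
                                           range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            topologies.append((sockets, cores, threads))
    return topologies

# For the 1-vCPU flavor in the log this yields a single topology:
print(possible_topologies(1))   # [(1, 1, 1)]
```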
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.917021] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 42f06dc8-e5f6-475e-ba42-15b4abc3139a/42f06dc8-e5f6-475e-ba42-15b4abc3139a.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1847.917021] env[62508]: DEBUG oslo_vmware.api [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1847.917021] env[62508]: value = "task-1776724" [ 1847.917021] env[62508]: _type = "Task" [ 1847.917021] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.917021] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1cac0ba8-d7c9-4225-b240-cfc5ccbac3ee {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.927233] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1847.927233] env[62508]: value = "task-1776725" [ 1847.927233] env[62508]: _type = "Task" [ 1847.927233] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.927438] env[62508]: DEBUG oslo_vmware.api [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776724, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.936664] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776725, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.034073] env[62508]: INFO nova.network.neutron [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Port 7f4a8a8e-266d-408a-b48e-2a6ecb65e47b from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
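The instance_info_cache payloads in this section are lists of VIF dictionaries: each carries the port id, MAC address, and a nested network with subnets, fixed IPs and optional floating IPs. A short sketch that extracts the commonly needed fields from one such entry; the sample data is trimmed from the cache update for port 60434e32-b866-43d2-8a33-8925c4459e29 logged above:

```python
def summarize_vif(vif):
    """Return (port_id, mac, fixed_ips, floating_ips) for one cached VIF."""
    fixed, floating = [], []
    for subnet in vif['network']['subnets']:
        for ip in subnet['ips']:
            fixed.append(ip['address'])
            floating.extend(f['address'] for f in ip.get('floating_ips', []))
    return vif['id'], vif['address'], fixed, floating

# Trimmed example based on the cache entry above:
vif = {
    'id': '60434e32-b866-43d2-8a33-8925c4459e29',
    'address': 'fa:16:3e:3f:a7:2d',
    'network': {'subnets': [{'cidr': '192.168.128.0/28',
                             'ips': [{'address': '192.168.128.10',
                                      'floating_ips': []}]}]},
}
print(summarize_vif(vif))
```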
[ 1848.034657] env[62508]: DEBUG nova.network.neutron [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Updating instance_info_cache with network_info: [{"id": "ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4", "address": "fa:16:3e:8b:f5:37", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea63968d-e3", "ovs_interfaceid": "ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1848.327590] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Releasing lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1848.327876] env[62508]: DEBUG nova.compute.manager [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Instance network_info: |[{"id": "60434e32-b866-43d2-8a33-8925c4459e29", "address": "fa:16:3e:3f:a7:2d", "network": {"id": "7fdcf35b-d562-4926-a8b1-15143df837c1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-791265259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86a46b5a43dd41e48816a8d86e3685b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60434e32-b8", "ovs_interfaceid": "60434e32-b866-43d2-8a33-8925c4459e29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 1848.330417] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:a7:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0df968ae-c1ef-4009-a0f4-6f2e799c2fda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '60434e32-b866-43d2-8a33-8925c4459e29', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1848.338768] env[62508]: DEBUG oslo.service.loopingcall [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1848.339341] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1848.339588] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-54549eac-4b3e-4628-bc4a-c33b34d1c86e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.365608] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1848.365608] env[62508]: value = "task-1776726" [ 1848.365608] env[62508]: _type = "Task" [ 1848.365608] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.377451] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776726, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.417016] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83cb3dd1-68ae-4afb-be9e-3781c58253bf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.429426] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e279f8-e7e2-4510-a776-9ccff063b77b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.433057] env[62508]: DEBUG oslo_vmware.api [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776724, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.443357] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776725, 'name': Rename_Task, 'duration_secs': 0.153827} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.475925] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1848.476495] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fccb165c-0338-42fa-bff3-722bdb965ebf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.478657] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcfd0f70-e367-4384-9f93-c7382aa656db {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.487945] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c7645a-ac5e-4fb9-a6fe-69bdd8adc06b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.491731] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1848.491731] env[62508]: value = "task-1776727" [ 1848.491731] env[62508]: _type = "Task" [ 1848.491731] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.502832] env[62508]: DEBUG nova.compute.provider_tree [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1848.509632] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776727, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.540915] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1848.578879] env[62508]: DEBUG nova.compute.manager [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Received event network-changed-60434e32-b866-43d2-8a33-8925c4459e29 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1848.579109] env[62508]: DEBUG nova.compute.manager [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Refreshing instance network info cache due to event network-changed-60434e32-b866-43d2-8a33-8925c4459e29. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1848.579396] env[62508]: DEBUG oslo_concurrency.lockutils [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] Acquiring lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1848.579518] env[62508]: DEBUG oslo_concurrency.lockutils [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] Acquired lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1848.579709] env[62508]: DEBUG nova.network.neutron [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Refreshing network info cache for port 60434e32-b866-43d2-8a33-8925c4459e29 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1848.635439] env[62508]: DEBUG nova.network.neutron [-] [instance: a6002275-d684-4609-9935-95180cff36d8] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1848.876018] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776726, 'name': CreateVM_Task, 'duration_secs': 0.396897} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.876319] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1848.876866] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1848.877044] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1848.877377] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1848.877628] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-483c64bf-69ac-4cfc-a1cd-98f1a2294e78 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.881855] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 
tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1848.881855] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5285ee01-2c93-bac7-8070-2968ecba7dad" [ 1848.881855] env[62508]: _type = "Task" [ 1848.881855] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.888897] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5285ee01-2c93-bac7-8070-2968ecba7dad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.926633] env[62508]: DEBUG oslo_vmware.api [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776724, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.002404] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776727, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.006417] env[62508]: DEBUG nova.scheduler.client.report [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1849.028224] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "interface-aa7c5176-4420-44b1-9fea-6db7561492c7-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.028477] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "interface-aa7c5176-4420-44b1-9fea-6db7561492c7-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.028828] env[62508]: DEBUG nova.objects.instance [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lazy-loading 
'flavor' on Instance uuid aa7c5176-4420-44b1-9fea-6db7561492c7 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1849.043914] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cfa3578c-4f63-4aec-9b23-66f8b9c0f453 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "interface-0a4958d5-b9a9-4854-90ca-f19eb34cb15b-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.164s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.139308] env[62508]: INFO nova.compute.manager [-] [instance: a6002275-d684-4609-9935-95180cff36d8] Took 1.69 seconds to deallocate network for instance. [ 1849.294212] env[62508]: DEBUG nova.network.neutron [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Updated VIF entry in instance network info cache for port 60434e32-b866-43d2-8a33-8925c4459e29. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1849.294624] env[62508]: DEBUG nova.network.neutron [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Updating instance_info_cache with network_info: [{"id": "60434e32-b866-43d2-8a33-8925c4459e29", "address": "fa:16:3e:3f:a7:2d", "network": {"id": "7fdcf35b-d562-4926-a8b1-15143df837c1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-791265259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86a46b5a43dd41e48816a8d86e3685b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60434e32-b8", "ovs_interfaceid": "60434e32-b866-43d2-8a33-8925c4459e29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1849.392907] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5285ee01-2c93-bac7-8070-2968ecba7dad, 'name': SearchDatastore_Task, 'duration_secs': 0.009798} completed successfully. 
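The inventory report above lists, per resource class, a total, a reserved amount and an allocation ratio; the capacity the scheduler can actually place against is derived from those three numbers. A small sketch of that arithmetic, assuming placement's usual formula of (total - reserved) * allocation_ratio (the formula is an assumption stated here, not something read from this log):

```python
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def schedulable_capacity(inv):
    # Capacity available for allocations: (total - reserved) * allocation_ratio.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(schedulable_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```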
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.393352] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1849.393667] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1849.393913] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1849.394198] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1849.394285] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1849.394539] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41c9437f-fd87-48bb-911c-0ff0f0ff251c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.403844] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1849.404058] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1849.404742] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2c10e03-2d2f-4e2d-b3e7-869249320ede {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.410239] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1849.410239] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e255b5-a3cd-d728-d7ad-421bc990bf05" [ 1849.410239] env[62508]: _type = "Task" [ 1849.410239] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.417603] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e255b5-a3cd-d728-d7ad-421bc990bf05, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.426138] env[62508]: DEBUG oslo_vmware.api [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776724, 'name': ReconfigVM_Task, 'duration_secs': 1.175516} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.426385] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Reconfigured VM instance instance-00000064 to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1849.427121] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c8a900-2dfd-4cc6-ac89-309b3a836120 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.449593] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 544d165c-5054-4c57-a5d9-ac69046c6fbc/544d165c-5054-4c57-a5d9-ac69046c6fbc.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1849.449953] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f255fe95-d4b1-486a-83b8-4402d6893540 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.468395] env[62508]: DEBUG oslo_vmware.api [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1849.468395] env[62508]: value = "task-1776728" [ 1849.468395] env[62508]: _type = "Task" [ 
1849.468395] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.476868] env[62508]: DEBUG oslo_vmware.api [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776728, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.502505] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776727, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.511577] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.297s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.512136] env[62508]: DEBUG nova.compute.manager [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1849.632953] env[62508]: DEBUG nova.objects.instance [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lazy-loading 'pci_requests' on Instance uuid aa7c5176-4420-44b1-9fea-6db7561492c7 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1849.647242] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.647480] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.647690] env[62508]: DEBUG nova.objects.instance [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lazy-loading 'resources' on Instance uuid a6002275-d684-4609-9935-95180cff36d8 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1849.797177] env[62508]: DEBUG oslo_concurrency.lockutils [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] Releasing lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
1849.797481] env[62508]: DEBUG nova.compute.manager [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] [instance: a6002275-d684-4609-9935-95180cff36d8] Received event network-vif-deleted-121e891d-bfe4-4624-a343-95db7afe5757 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1849.797676] env[62508]: INFO nova.compute.manager [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] [instance: a6002275-d684-4609-9935-95180cff36d8] Neutron deleted interface 121e891d-bfe4-4624-a343-95db7afe5757; detaching it from the instance and deleting it from the info cache [ 1849.797839] env[62508]: DEBUG nova.network.neutron [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] [instance: a6002275-d684-4609-9935-95180cff36d8] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1849.921402] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e255b5-a3cd-d728-d7ad-421bc990bf05, 'name': SearchDatastore_Task, 'duration_secs': 0.009148} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.922312] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-417873bd-1f40-473d-8114-7ac2cf68090d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.927840] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1849.927840] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52757675-30a6-ccd5-e19b-e7a01dd3b5cf" [ 1849.927840] env[62508]: _type = "Task" [ 1849.927840] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.937129] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52757675-30a6-ccd5-e19b-e7a01dd3b5cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.977417] env[62508]: DEBUG oslo_vmware.api [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776728, 'name': ReconfigVM_Task, 'duration_secs': 0.276705} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.977647] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 544d165c-5054-4c57-a5d9-ac69046c6fbc/544d165c-5054-4c57-a5d9-ac69046c6fbc.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1849.977909] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updating instance '544d165c-5054-4c57-a5d9-ac69046c6fbc' progress to 50 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1850.001777] env[62508]: DEBUG oslo_vmware.api [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776727, 'name': PowerOnVM_Task, 'duration_secs': 1.022954} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.002037] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1850.002293] env[62508]: INFO nova.compute.manager [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Took 8.32 seconds to spawn the instance on the hypervisor. [ 1850.002488] env[62508]: DEBUG nova.compute.manager [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1850.003258] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7eec2e-7a48-4e41-957e-06c68bf4aaf6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.017011] env[62508]: DEBUG nova.compute.utils [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1850.018253] env[62508]: DEBUG nova.compute.manager [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1850.018418] env[62508]: DEBUG nova.network.neutron [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1850.055793] env[62508]: DEBUG nova.policy [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '712ef76e285f48e6b5e8f75aa2fee850', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce113e91e2b74136a8050ed3acf3557c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1850.088450] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Didn't find any instances for network info cache update. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1850.088661] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1850.088851] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1850.089151] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1850.089319] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1850.089468] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1850.089618] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1850.089747] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... 
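The run of periodic_task entries above is the compute manager's periodic task machinery firing its registered tasks (_poll_unconfirmed_resizes, update_available_resource, and so on), with _reclaim_queued_deletes skipped because its interval is not configured. A minimal sketch of registering such tasks with oslo.service; the manager class and task bodies below are illustrative only:

```python
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF

class DemoManager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60)
    def _poll_unconfirmed_resizes(self, context):
        # Would confirm resizes the user never confirmed; body elided.
        pass

    @periodic_task.periodic_task(spacing=60)
    def update_available_resource(self, context):
        # Would refresh the resource tracker, as in the log above.
        pass

# The service framework invokes run_periodic_tasks(context) on a timer,
# which produces "Running periodic task ..." debug lines like those above.
```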
{{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1850.089890] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1850.135960] env[62508]: DEBUG nova.objects.base [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1850.136226] env[62508]: DEBUG nova.network.neutron [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1850.245439] env[62508]: DEBUG nova.policy [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c1793957cc840d58a1b6f1f9b38b96b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b46df14344794f29a8b0c00408d18159', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1850.301024] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c61af32c-d6f4-4030-bafd-d4e77b35ba8d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.311035] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e2e762-b115-460d-bc4e-52ead994afba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.348868] env[62508]: DEBUG nova.compute.manager [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] [instance: a6002275-d684-4609-9935-95180cff36d8] Detach interface failed, port_id=121e891d-bfe4-4624-a343-95db7afe5757, reason: Instance a6002275-d684-4609-9935-95180cff36d8 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1850.349599] env[62508]: DEBUG nova.compute.manager [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Received event network-changed-ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1850.349873] env[62508]: DEBUG nova.compute.manager [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Refreshing instance network info cache due to event network-changed-ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1850.350152] env[62508]: DEBUG oslo_concurrency.lockutils [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] Acquiring lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1850.350367] env[62508]: DEBUG oslo_concurrency.lockutils [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] Acquired lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1850.350787] env[62508]: DEBUG nova.network.neutron [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Refreshing network info cache for port ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1850.386327] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87e3d8d-2259-4c87-a651-0036238f4d07 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.394339] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a89503-a112-4961-932a-d2960889c749 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.432737] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9bd6b4-fb6f-470f-b4a1-0a0e92b1e7a8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.442588] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52757675-30a6-ccd5-e19b-e7a01dd3b5cf, 'name': SearchDatastore_Task, 'duration_secs': 0.009971} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.445037] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1850.445347] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] aea987d3-1daf-45f5-84c3-893eb6bdb57a/aea987d3-1daf-45f5-84c3-893eb6bdb57a.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1850.445903] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-813022f7-700c-4d90-b538-b77f67b00491 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.449392] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e70d1186-b493-4665-920f-8f45fdd24bc8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.454793] env[62508]: DEBUG nova.network.neutron [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Successfully created port: 8fad22e1-6bfd-45d7-89b1-d953be11abbc {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1850.465050] env[62508]: DEBUG nova.compute.provider_tree [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1850.468430] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1850.468430] env[62508]: value = "task-1776729" [ 1850.468430] env[62508]: _type = "Task" [ 1850.468430] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.477964] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776729, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.484388] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85976d6e-d17e-4576-adb1-8b67781a7c10 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.504811] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b72ff43-6fe9-4be2-8e06-ed4ec03daed6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.529965] env[62508]: INFO nova.compute.manager [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Took 16.87 seconds to build instance. [ 1850.533897] env[62508]: DEBUG nova.compute.manager [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1850.535217] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updating instance '544d165c-5054-4c57-a5d9-ac69046c6fbc' progress to 67 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1850.593728] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1850.970108] env[62508]: DEBUG nova.scheduler.client.report [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1850.983654] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776729, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521337} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.983953] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] aea987d3-1daf-45f5-84c3-893eb6bdb57a/aea987d3-1daf-45f5-84c3-893eb6bdb57a.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1850.984200] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1850.984473] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4fb0f8c2-fff4-4974-9d51-45e2c53e32a2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.995793] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1850.995793] env[62508]: value = "task-1776730" [ 1850.995793] env[62508]: _type = "Task" [ 1850.995793] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.005985] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776730, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.032574] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3494ab7e-ffc8-4605-874a-cc8c55659b87 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "42f06dc8-e5f6-475e-ba42-15b4abc3139a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.716s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.177316] env[62508]: DEBUG nova.network.neutron [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Port e0bb9906-0d2a-4bdb-bbe4-5a3074c66499 binding to destination host cpu-1 is already ACTIVE {{(pid=62508) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1851.254118] env[62508]: DEBUG nova.network.neutron [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Updated VIF entry in instance network info cache for port ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1851.254495] env[62508]: DEBUG nova.network.neutron [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Updating instance_info_cache with network_info: [{"id": "ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4", "address": "fa:16:3e:8b:f5:37", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea63968d-e3", "ovs_interfaceid": "ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1851.420445] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "42f06dc8-e5f6-475e-ba42-15b4abc3139a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.420687] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "42f06dc8-e5f6-475e-ba42-15b4abc3139a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.420955] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "42f06dc8-e5f6-475e-ba42-15b4abc3139a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.421251] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "42f06dc8-e5f6-475e-ba42-15b4abc3139a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.421471] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 
tempest-ServersTestJSON-1349594885-project-member] Lock "42f06dc8-e5f6-475e-ba42-15b4abc3139a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.423913] env[62508]: INFO nova.compute.manager [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Terminating instance [ 1851.425963] env[62508]: DEBUG nova.compute.manager [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1851.426209] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1851.427223] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45796aa5-97cd-45ca-868a-2f08ed456d73 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.435797] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1851.436078] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9dfbd5f3-1dda-44ad-b146-5ebc2195ef4f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.443214] env[62508]: DEBUG oslo_vmware.api [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1851.443214] env[62508]: value = "task-1776731" [ 1851.443214] env[62508]: _type = "Task" [ 1851.443214] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.452318] env[62508]: DEBUG oslo_vmware.api [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776731, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.479420] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.832s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.481982] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.888s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.482248] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.482586] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1851.483345] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751a17ce-8ef8-417e-aa73-654c269dbff9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.491893] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9059496-8b31-4f59-b92b-ec22f3c55b83 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.512925] env[62508]: INFO nova.scheduler.client.report [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Deleted allocations for instance a6002275-d684-4609-9935-95180cff36d8 [ 1851.514772] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f7b4f0-084b-4193-a775-57558a54a2f9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.517164] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776730, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087819} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.519677] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1851.520975] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2916b3c9-de98-4b87-adb7-3c31fe25a75a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.527551] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f203bd9-69e9-4cfc-b536-f47c3bb6f952 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.549780] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] aea987d3-1daf-45f5-84c3-893eb6bdb57a/aea987d3-1daf-45f5-84c3-893eb6bdb57a.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1851.551567] env[62508]: DEBUG nova.compute.manager [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1851.553595] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d7e09e0-e61a-46c3-8944-f6da9a4efe1e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.594200] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180024MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1851.594360] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.594568] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.606566] env[62508]: DEBUG nova.virt.hardware [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1851.606874] env[62508]: DEBUG nova.virt.hardware [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1851.607067] env[62508]: DEBUG nova.virt.hardware [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1851.607269] env[62508]: DEBUG nova.virt.hardware [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1851.607415] env[62508]: DEBUG nova.virt.hardware [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 
tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1851.607560] env[62508]: DEBUG nova.virt.hardware [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1851.607763] env[62508]: DEBUG nova.virt.hardware [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1851.607928] env[62508]: DEBUG nova.virt.hardware [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1851.608107] env[62508]: DEBUG nova.virt.hardware [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1851.608276] env[62508]: DEBUG nova.virt.hardware [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1851.608450] env[62508]: DEBUG nova.virt.hardware [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1851.609405] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d77650d-95bc-479c-bafa-4a2d487e043d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.613407] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1851.613407] env[62508]: value = "task-1776732" [ 1851.613407] env[62508]: _type = "Task" [ 1851.613407] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.620588] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0125a1-9a66-41be-a9ef-e537ee4cb713 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.627689] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776732, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.757641] env[62508]: DEBUG oslo_concurrency.lockutils [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] Releasing lock "refresh_cache-0a4958d5-b9a9-4854-90ca-f19eb34cb15b" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1851.758016] env[62508]: DEBUG nova.compute.manager [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Received event network-changed-0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1851.758136] env[62508]: DEBUG nova.compute.manager [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Refreshing instance network info cache due to event network-changed-0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1851.758321] env[62508]: DEBUG oslo_concurrency.lockutils [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] Acquiring lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1851.758470] env[62508]: DEBUG oslo_concurrency.lockutils [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] Acquired lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1851.758641] env[62508]: DEBUG nova.network.neutron [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Refreshing network info cache for port 0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1851.875061] env[62508]: DEBUG nova.compute.manager [req-b0cd06e1-e8a1-45cc-9697-e91407ba4745 req-0e133841-a900-44d2-8edf-cfc2a7c6e8a5 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Received event network-vif-plugged-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1851.875399] env[62508]: DEBUG oslo_concurrency.lockutils [req-b0cd06e1-e8a1-45cc-9697-e91407ba4745 req-0e133841-a900-44d2-8edf-cfc2a7c6e8a5 service nova] Acquiring lock "aa7c5176-4420-44b1-9fea-6db7561492c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.875501] env[62508]: DEBUG oslo_concurrency.lockutils [req-b0cd06e1-e8a1-45cc-9697-e91407ba4745 req-0e133841-a900-44d2-8edf-cfc2a7c6e8a5 service nova] Lock "aa7c5176-4420-44b1-9fea-6db7561492c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.875698] env[62508]: DEBUG oslo_concurrency.lockutils [req-b0cd06e1-e8a1-45cc-9697-e91407ba4745 req-0e133841-a900-44d2-8edf-cfc2a7c6e8a5 service nova] Lock "aa7c5176-4420-44b1-9fea-6db7561492c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.875871] env[62508]: DEBUG nova.compute.manager [req-b0cd06e1-e8a1-45cc-9697-e91407ba4745 req-0e133841-a900-44d2-8edf-cfc2a7c6e8a5 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] No waiting events found dispatching network-vif-plugged-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1851.877900] env[62508]: WARNING nova.compute.manager [req-b0cd06e1-e8a1-45cc-9697-e91407ba4745 req-0e133841-a900-44d2-8edf-cfc2a7c6e8a5 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Received unexpected event network-vif-plugged-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b for instance with vm_state active and task_state None. [ 1851.965248] env[62508]: DEBUG oslo_vmware.api [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776731, 'name': PowerOffVM_Task, 'duration_secs': 0.487071} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.965536] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1851.965704] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1851.965968] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85a0e3f5-8d5f-4282-8766-cbd50ea4903a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.024788] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a7778aea-9164-4e70-94cb-e0b8b034c470 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "a6002275-d684-4609-9935-95180cff36d8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.215s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.057492] env[62508]: DEBUG nova.network.neutron [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Successfully updated port: 7f4a8a8e-266d-408a-b48e-2a6ecb65e47b {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1852.126996] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776732, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.200571] env[62508]: DEBUG nova.network.neutron [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Successfully updated port: 8fad22e1-6bfd-45d7-89b1-d953be11abbc {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1852.203113] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1852.203342] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1852.203555] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleting the datastore file [datastore1] 42f06dc8-e5f6-475e-ba42-15b4abc3139a {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1852.207343] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ee8bba7-a314-47e0-bb48-fa780747ec93 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.213558] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "544d165c-5054-4c57-a5d9-ac69046c6fbc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.213906] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "544d165c-5054-4c57-a5d9-ac69046c6fbc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1852.214218] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "544d165c-5054-4c57-a5d9-ac69046c6fbc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.221798] env[62508]: DEBUG oslo_vmware.api [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1852.221798] env[62508]: value = "task-1776734" [ 1852.221798] env[62508]: _type = "Task" [ 1852.221798] 
env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.231889] env[62508]: DEBUG oslo_vmware.api [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776734, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.521194] env[62508]: DEBUG nova.network.neutron [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Updated VIF entry in instance network info cache for port 0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1852.521611] env[62508]: DEBUG nova.network.neutron [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Updating instance_info_cache with network_info: [{"id": "0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3", "address": "fa:16:3e:62:2e:ea", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e5955b6-9b", "ovs_interfaceid": "0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1852.560165] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1852.607099] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Applying migration context for instance 544d165c-5054-4c57-a5d9-ac69046c6fbc as it has an incoming, in-progress migration 80926dc1-cf00-4cb3-84ef-5ec854d14f1f. 
Migration status is post-migrating {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1852.608566] env[62508]: INFO nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updating resource usage from migration 80926dc1-cf00-4cb3-84ef-5ec854d14f1f [ 1852.624725] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776732, 'name': ReconfigVM_Task, 'duration_secs': 0.829771} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.625008] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Reconfigured VM instance instance-0000006b to attach disk [datastore1] aea987d3-1daf-45f5-84c3-893eb6bdb57a/aea987d3-1daf-45f5-84c3-893eb6bdb57a.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1852.625708] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f2e0d500-40bb-40b2-9ee6-8c80ead6e6e7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.627947] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance de69dbf0-86f1-4b05-a9db-8b9afaabe49c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1852.628103] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance b74d8374-d5ae-456b-9e9e-ec09459a737b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1852.628233] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 0a4958d5-b9a9-4854-90ca-f19eb34cb15b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1852.628352] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance aa7c5176-4420-44b1-9fea-6db7561492c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1852.628470] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance d3455694-a157-404f-8153-a9f96bac49a2 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1852.628586] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance cd2424b1-3842-4df4-8636-23417833ea49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1852.628700] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance d3829b04-6d1f-44f0-8b94-30b582506ed4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1852.628813] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 42f06dc8-e5f6-475e-ba42-15b4abc3139a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1852.628925] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Migration 80926dc1-cf00-4cb3-84ef-5ec854d14f1f is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1852.629078] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 544d165c-5054-4c57-a5d9-ac69046c6fbc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1852.629203] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance aea987d3-1daf-45f5-84c3-893eb6bdb57a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1852.629312] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance d58f5593-aafc-43e0-a040-96af10659b70 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1852.634987] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1852.634987] env[62508]: value = "task-1776735" [ 1852.634987] env[62508]: _type = "Task" [ 1852.634987] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.643106] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776735, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.704675] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1852.704866] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1852.705040] env[62508]: DEBUG nova.network.neutron [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1852.732988] env[62508]: DEBUG oslo_vmware.api [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776734, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.266463} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.732988] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1852.732988] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1852.733114] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1852.733288] env[62508]: INFO nova.compute.manager [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Took 1.31 seconds to destroy the instance on the hypervisor. 
[ 1852.733494] env[62508]: DEBUG oslo.service.loopingcall [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1852.733680] env[62508]: DEBUG nova.compute.manager [-] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1852.733776] env[62508]: DEBUG nova.network.neutron [-] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1852.854585] env[62508]: DEBUG oslo_concurrency.lockutils [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "d9341873-6ce8-4410-ae11-768c05c59f64" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.854763] env[62508]: DEBUG oslo_concurrency.lockutils [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "d9341873-6ce8-4410-ae11-768c05c59f64" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.024177] env[62508]: DEBUG oslo_concurrency.lockutils [req-3fb842f8-378e-4188-a96a-66ba2852f71b req-d1deedec-4526-4c70-b5a9-23c189d60210 service nova] Releasing lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1853.024719] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1853.024915] env[62508]: DEBUG nova.network.neutron [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1853.132873] env[62508]: INFO nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance d9341873-6ce8-4410-ae11-768c05c59f64 has allocations against this compute host but is not found in the database. 
[ 1853.133141] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1853.133323] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2944MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1853.144874] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776735, 'name': Rename_Task, 'duration_secs': 0.176431} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.144987] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1853.145249] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1c48bb5a-d8c2-4056-b7eb-cd323c0a9d1b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.151317] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1853.151317] env[62508]: value = "task-1776736" [ 1853.151317] env[62508]: _type = "Task" [ 1853.151317] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.165189] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776736, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.256978] env[62508]: DEBUG nova.network.neutron [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1853.261575] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1853.261746] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1853.261919] env[62508]: DEBUG nova.network.neutron [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1853.293649] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103f5e77-5358-46c2-b4ea-e8fbe5de25df {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.303507] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f788cd-f64c-40dd-8dd2-9a01bd2dcf12 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.336919] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a308a4-1c36-42ea-86af-3cdf78673547 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.346012] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8e14dc-b039-4702-bc47-896fa8d4155d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.364858] env[62508]: DEBUG nova.compute.manager [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1853.367970] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1853.422902] env[62508]: DEBUG nova.network.neutron [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Updating instance_info_cache with network_info: [{"id": "8fad22e1-6bfd-45d7-89b1-d953be11abbc", "address": "fa:16:3e:6e:f6:0b", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fad22e1-6b", "ovs_interfaceid": "8fad22e1-6bfd-45d7-89b1-d953be11abbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.481238] env[62508]: DEBUG nova.network.neutron [-] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.562496] env[62508]: WARNING nova.network.neutron [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] 8e80b270-1a6d-45fe-9a8b-355e686378f1 already exists in list: networks containing: ['8e80b270-1a6d-45fe-9a8b-355e686378f1']. ignoring it [ 1853.662947] env[62508]: DEBUG oslo_vmware.api [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776736, 'name': PowerOnVM_Task, 'duration_secs': 0.490819} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.663249] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1853.663517] env[62508]: INFO nova.compute.manager [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Took 7.41 seconds to spawn the instance on the hypervisor. [ 1853.663707] env[62508]: DEBUG nova.compute.manager [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1853.664507] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-974a2c24-4f0f-4092-ad49-0b832a336e1e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.872734] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1853.899236] env[62508]: DEBUG oslo_concurrency.lockutils [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.900312] env[62508]: DEBUG nova.network.neutron [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Updating instance_info_cache with network_info: [{"id": "0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3", "address": "fa:16:3e:62:2e:ea", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e5955b6-9b", "ovs_interfaceid": "0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7f4a8a8e-266d-408a-b48e-2a6ecb65e47b", "address": "fa:16:3e:c1:89:65", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f4a8a8e-26", "ovs_interfaceid": "7f4a8a8e-266d-408a-b48e-2a6ecb65e47b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.926097] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1853.926389] env[62508]: DEBUG nova.compute.manager [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Instance network_info: |[{"id": "8fad22e1-6bfd-45d7-89b1-d953be11abbc", "address": "fa:16:3e:6e:f6:0b", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fad22e1-6b", "ovs_interfaceid": "8fad22e1-6bfd-45d7-89b1-d953be11abbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1853.926798] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:f6:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35342bcb-8b06-472e-b3c0-43fd3d6c4b30', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8fad22e1-6bfd-45d7-89b1-d953be11abbc', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1853.934539] env[62508]: DEBUG oslo.service.loopingcall [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1853.934998] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1853.937457] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4b616d3-0542-415c-9ceb-b1c671896b0d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.957972] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1853.957972] env[62508]: value = "task-1776737" [ 1853.957972] env[62508]: _type = "Task" [ 1853.957972] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.965750] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776737, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.984029] env[62508]: INFO nova.compute.manager [-] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Took 1.25 seconds to deallocate network for instance. [ 1854.004381] env[62508]: DEBUG nova.compute.manager [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Received event network-changed-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1854.004584] env[62508]: DEBUG nova.compute.manager [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Refreshing instance network info cache due to event network-changed-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1854.004773] env[62508]: DEBUG oslo_concurrency.lockutils [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] Acquiring lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.055745] env[62508]: DEBUG nova.network.neutron [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updating instance_info_cache with network_info: [{"id": "e0bb9906-0d2a-4bdb-bbe4-5a3074c66499", "address": "fa:16:3e:50:39:a4", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0bb9906-0d", "ovs_interfaceid": "e0bb9906-0d2a-4bdb-bbe4-5a3074c66499", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1854.181077] env[62508]: INFO nova.compute.manager [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Took 14.72 seconds to build instance. 
[ 1854.380817] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1854.381616] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.786s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1854.381616] env[62508]: DEBUG oslo_concurrency.lockutils [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.482s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1854.382968] env[62508]: INFO nova.compute.claims [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1854.403223] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1854.403845] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.404017] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.404298] env[62508]: DEBUG oslo_concurrency.lockutils [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] Acquired lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.404478] env[62508]: DEBUG nova.network.neutron [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Refreshing network info cache for port 7f4a8a8e-266d-408a-b48e-2a6ecb65e47b {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1854.408775] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e4c50ac-6fb9-4b3c-b0e1-e940fa7a87da {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.426128] env[62508]: DEBUG nova.virt.hardware [None 
req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format=<?>,created_at=<?>,direct_url=<?>,disk_format=<?>,id=<?>,min_disk=<?>,min_ram=<?>,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1854.426381] env[62508]: DEBUG nova.virt.hardware [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1854.426542] env[62508]: DEBUG nova.virt.hardware [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1854.426731] env[62508]: DEBUG nova.virt.hardware [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1854.426875] env[62508]: DEBUG nova.virt.hardware [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1854.427041] env[62508]: DEBUG nova.virt.hardware [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1854.427256] env[62508]: DEBUG nova.virt.hardware [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1854.427423] env[62508]: DEBUG nova.virt.hardware [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1854.427592] env[62508]: DEBUG nova.virt.hardware [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1854.427757] env[62508]: DEBUG nova.virt.hardware [None 
req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1854.427929] env[62508]: DEBUG nova.virt.hardware [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1854.434385] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Reconfiguring VM to attach interface {{(pid=62508) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1854.435651] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fed61b28-8148-45c1-bf59-75e0b0685043 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.453726] env[62508]: DEBUG oslo_vmware.api [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1854.453726] env[62508]: value = "task-1776738" [ 1854.453726] env[62508]: _type = "Task" [ 1854.453726] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.466110] env[62508]: DEBUG oslo_vmware.api [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776738, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.471208] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776737, 'name': CreateVM_Task, 'duration_secs': 0.36797} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.471398] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1854.472044] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.472353] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.472609] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1854.472853] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c687f991-ccc9-49eb-bfd5-087b266beca3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.477303] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1854.477303] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f0e8c9-f7ee-afae-8600-fa69cffa73d5" [ 1854.477303] env[62508]: _type = "Task" [ 1854.477303] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.485590] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f0e8c9-f7ee-afae-8600-fa69cffa73d5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.490670] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.558441] env[62508]: DEBUG oslo_concurrency.lockutils [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1854.592376] env[62508]: DEBUG nova.compute.manager [req-ae4af8d7-495d-4188-a9d0-d54019a90749 req-9a34949e-bc48-4778-b078-313f7ca59895 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Received event network-changed-60434e32-b866-43d2-8a33-8925c4459e29 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1854.592578] env[62508]: DEBUG nova.compute.manager [req-ae4af8d7-495d-4188-a9d0-d54019a90749 req-9a34949e-bc48-4778-b078-313f7ca59895 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Refreshing instance network info cache due to event network-changed-60434e32-b866-43d2-8a33-8925c4459e29. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1854.592788] env[62508]: DEBUG oslo_concurrency.lockutils [req-ae4af8d7-495d-4188-a9d0-d54019a90749 req-9a34949e-bc48-4778-b078-313f7ca59895 service nova] Acquiring lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.593339] env[62508]: DEBUG oslo_concurrency.lockutils [req-ae4af8d7-495d-4188-a9d0-d54019a90749 req-9a34949e-bc48-4778-b078-313f7ca59895 service nova] Acquired lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.593547] env[62508]: DEBUG nova.network.neutron [req-ae4af8d7-495d-4188-a9d0-d54019a90749 req-9a34949e-bc48-4778-b078-313f7ca59895 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Refreshing network info cache for port 60434e32-b866-43d2-8a33-8925c4459e29 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1854.682305] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46214220-8155-4c58-b227-f7e3fe130971 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 16.234s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1854.964142] env[62508]: DEBUG oslo_vmware.api [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776738, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.989543] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f0e8c9-f7ee-afae-8600-fa69cffa73d5, 'name': SearchDatastore_Task, 'duration_secs': 0.009581} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.990095] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1854.990423] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1854.990675] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.990821] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.991012] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1854.991312] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d8acec13-31ae-4c38-8536-5c1dd6948ee1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.998796] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1854.999044] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1855.001645] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15838ceb-aa2a-4036-a6f8-f3bb3bd56cb6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.006631] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1855.006631] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52dd2551-c8e9-0a2e-9a99-1c372f015f2f" [ 1855.006631] env[62508]: _type = "Task" [ 1855.006631] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.014097] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52dd2551-c8e9-0a2e-9a99-1c372f015f2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.084321] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f91582e-87e2-4441-9d51-905ecafb1405 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.108248] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04cc3089-810e-4e74-a0a0-d88aabeaaa66 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.120394] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updating instance '544d165c-5054-4c57-a5d9-ac69046c6fbc' progress to 83 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1855.143096] env[62508]: DEBUG nova.network.neutron [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Updated VIF entry in instance network info cache for port 7f4a8a8e-266d-408a-b48e-2a6ecb65e47b. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1855.143456] env[62508]: DEBUG nova.network.neutron [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Updating instance_info_cache with network_info: [{"id": "0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3", "address": "fa:16:3e:62:2e:ea", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e5955b6-9b", "ovs_interfaceid": "0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7f4a8a8e-266d-408a-b48e-2a6ecb65e47b", "address": "fa:16:3e:c1:89:65", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f4a8a8e-26", "ovs_interfaceid": "7f4a8a8e-266d-408a-b48e-2a6ecb65e47b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1855.328712] env[62508]: DEBUG nova.network.neutron [req-ae4af8d7-495d-4188-a9d0-d54019a90749 req-9a34949e-bc48-4778-b078-313f7ca59895 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Updated VIF entry in instance network info cache for port 60434e32-b866-43d2-8a33-8925c4459e29. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1855.329083] env[62508]: DEBUG nova.network.neutron [req-ae4af8d7-495d-4188-a9d0-d54019a90749 req-9a34949e-bc48-4778-b078-313f7ca59895 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Updating instance_info_cache with network_info: [{"id": "60434e32-b866-43d2-8a33-8925c4459e29", "address": "fa:16:3e:3f:a7:2d", "network": {"id": "7fdcf35b-d562-4926-a8b1-15143df837c1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-791265259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86a46b5a43dd41e48816a8d86e3685b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60434e32-b8", "ovs_interfaceid": "60434e32-b866-43d2-8a33-8925c4459e29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1855.465612] env[62508]: DEBUG oslo_vmware.api [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776738, 'name': ReconfigVM_Task, 'duration_secs': 0.60429} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.466124] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1855.466340] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Reconfigured VM to attach interface {{(pid=62508) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1855.516772] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52dd2551-c8e9-0a2e-9a99-1c372f015f2f, 'name': SearchDatastore_Task, 'duration_secs': 0.008797} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.519787] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01db4635-ff07-4657-bcfc-8b87368f74cc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.524787] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1855.524787] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5202736a-9d32-7053-6888-78aae38cf726" [ 1855.524787] env[62508]: _type = "Task" [ 1855.524787] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.534287] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5202736a-9d32-7053-6888-78aae38cf726, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.547909] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee8f69d-26eb-405f-8290-6d99a1f736c8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.554796] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4975e38d-b9ff-497d-b8cb-b3a864a68dfc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.584547] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b16c2f-991b-4964-b512-a4f9454015af {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.591504] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c73a6f-58e6-49d9-9aeb-7f5cacda1816 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.605703] env[62508]: DEBUG nova.compute.provider_tree [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1855.627043] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1855.627547] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a91a253c-74ca-4742-b187-0c5426bab460 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.633961] env[62508]: DEBUG oslo_vmware.api [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 
tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1855.633961] env[62508]: value = "task-1776739" [ 1855.633961] env[62508]: _type = "Task" [ 1855.633961] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.641593] env[62508]: DEBUG oslo_vmware.api [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776739, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.646162] env[62508]: DEBUG oslo_concurrency.lockutils [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] Releasing lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1855.646426] env[62508]: DEBUG nova.compute.manager [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Received event network-vif-plugged-8fad22e1-6bfd-45d7-89b1-d953be11abbc {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1855.646646] env[62508]: DEBUG oslo_concurrency.lockutils [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] Acquiring lock "d58f5593-aafc-43e0-a040-96af10659b70-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.646896] env[62508]: DEBUG oslo_concurrency.lockutils [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] Lock "d58f5593-aafc-43e0-a040-96af10659b70-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.647121] env[62508]: DEBUG oslo_concurrency.lockutils [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] Lock "d58f5593-aafc-43e0-a040-96af10659b70-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.647336] env[62508]: DEBUG nova.compute.manager [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] No waiting events found dispatching network-vif-plugged-8fad22e1-6bfd-45d7-89b1-d953be11abbc {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1855.647530] env[62508]: WARNING nova.compute.manager [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Received unexpected event network-vif-plugged-8fad22e1-6bfd-45d7-89b1-d953be11abbc for instance with vm_state building and task_state spawning. 
[ 1855.647710] env[62508]: DEBUG nova.compute.manager [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Received event network-changed-8fad22e1-6bfd-45d7-89b1-d953be11abbc {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1855.647869] env[62508]: DEBUG nova.compute.manager [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Refreshing instance network info cache due to event network-changed-8fad22e1-6bfd-45d7-89b1-d953be11abbc. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1855.648064] env[62508]: DEBUG oslo_concurrency.lockutils [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] Acquiring lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1855.648207] env[62508]: DEBUG oslo_concurrency.lockutils [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] Acquired lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1855.648372] env[62508]: DEBUG nova.network.neutron [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Refreshing network info cache for port 8fad22e1-6bfd-45d7-89b1-d953be11abbc {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1855.832033] env[62508]: DEBUG oslo_concurrency.lockutils [req-ae4af8d7-495d-4188-a9d0-d54019a90749 req-9a34949e-bc48-4778-b078-313f7ca59895 service nova] Releasing lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1855.971597] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9f7db2a8-93ae-48ea-97b1-bed272d08100 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "interface-aa7c5176-4420-44b1-9fea-6db7561492c7-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b" "released" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: held 6.943s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.035048] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5202736a-9d32-7053-6888-78aae38cf726, 'name': SearchDatastore_Task, 'duration_secs': 0.009606} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.035048] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1856.035315] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] d58f5593-aafc-43e0-a040-96af10659b70/d58f5593-aafc-43e0-a040-96af10659b70.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1856.035398] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a64f6bd-b3f9-4495-b68b-73c1bd22f184 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.042568] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1856.042568] env[62508]: value = "task-1776740" [ 1856.042568] env[62508]: _type = "Task" [ 1856.042568] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.049776] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776740, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.108713] env[62508]: DEBUG nova.scheduler.client.report [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1856.144321] env[62508]: DEBUG oslo_vmware.api [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776739, 'name': PowerOnVM_Task, 'duration_secs': 0.364086} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.144577] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1856.144763] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-91efe864-2410-4d7d-a3a8-a262513fa8c5 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updating instance '544d165c-5054-4c57-a5d9-ac69046c6fbc' progress to 100 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1856.371129] env[62508]: DEBUG nova.network.neutron [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Updated VIF entry in instance network info cache for port 8fad22e1-6bfd-45d7-89b1-d953be11abbc. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1856.371568] env[62508]: DEBUG nova.network.neutron [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Updating instance_info_cache with network_info: [{"id": "8fad22e1-6bfd-45d7-89b1-d953be11abbc", "address": "fa:16:3e:6e:f6:0b", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fad22e1-6b", "ovs_interfaceid": "8fad22e1-6bfd-45d7-89b1-d953be11abbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1856.552713] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776740, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.446082} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.552980] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] d58f5593-aafc-43e0-a040-96af10659b70/d58f5593-aafc-43e0-a040-96af10659b70.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1856.553227] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1856.553493] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c69f886-5445-4520-9ab1-8f3ba43b0781 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.560206] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1856.560206] env[62508]: value = "task-1776741" [ 1856.560206] env[62508]: _type = "Task" [ 1856.560206] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.567744] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776741, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.614861] env[62508]: DEBUG oslo_concurrency.lockutils [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.233s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.615450] env[62508]: DEBUG nova.compute.manager [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1856.618730] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.128s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.618964] env[62508]: DEBUG nova.objects.instance [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lazy-loading 'resources' on Instance uuid 42f06dc8-e5f6-475e-ba42-15b4abc3139a {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1856.874622] env[62508]: DEBUG oslo_concurrency.lockutils [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] Releasing lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1856.874898] env[62508]: DEBUG nova.compute.manager [req-0b765a6d-5131-48b2-acfd-f942b875aefe req-56a499c1-90df-403a-8005-c56773c5be85 service nova] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Received event network-vif-deleted-72d5d39c-6144-4a83-b087-58b9ba4ffc16 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1857.072210] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776741, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070218} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.072521] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1857.073350] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b87e4ff-b2b2-4c1f-bd61-ef8e018c56be {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.096070] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] d58f5593-aafc-43e0-a040-96af10659b70/d58f5593-aafc-43e0-a040-96af10659b70.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1857.096368] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f70be94-bae8-4121-ae16-89525fe99caf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.115900] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1857.115900] env[62508]: value = "task-1776742" [ 1857.115900] env[62508]: _type = "Task" [ 1857.115900] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.126831] env[62508]: DEBUG nova.compute.utils [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1857.130852] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776742, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.131931] env[62508]: DEBUG nova.compute.manager [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1857.132439] env[62508]: DEBUG nova.network.neutron [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1857.177119] env[62508]: DEBUG nova.policy [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '466fd3a805b24749b134fe7977a5ac86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e90ec7156574be597a12f4fa0e8c1dc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1857.242748] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "interface-aa7c5176-4420-44b1-9fea-6db7561492c7-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1857.243140] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "interface-aa7c5176-4420-44b1-9fea-6db7561492c7-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1857.320229] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1790fdd-30e6-44a0-b2b4-9b52e552b367 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.329216] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aac1f3c-39b8-4ee9-be07-f7498505a86b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.360865] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b1e2c3-eb17-4153-8e2f-e287686be8eb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.369744] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1099d90f-a4db-4981-b2b6-cf5149056179 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.386239] env[62508]: DEBUG nova.compute.provider_tree [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed in ProviderTree for provider: 
5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1857.461293] env[62508]: DEBUG nova.network.neutron [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Successfully created port: 13de194a-6e4e-4af3-bdef-70dcae738549 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1857.629752] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776742, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.636144] env[62508]: DEBUG nova.compute.manager [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1857.746400] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1857.746629] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1857.748236] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a10a119-8ff2-4640-87d2-4af468bb01ab {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.767312] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92a158b8-ea2d-4c1a-b8ad-a315960f58a9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.797883] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Reconfiguring VM to detach interface {{(pid=62508) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1857.798211] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a194ce9-5e7f-482a-8ddc-641ff55a1438 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.819199] env[62508]: DEBUG oslo_vmware.api [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1857.819199] env[62508]: value = "task-1776743" [ 1857.819199] env[62508]: _type = "Task" [ 1857.819199] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.828296] env[62508]: DEBUG oslo_vmware.api [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776743, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.889585] env[62508]: DEBUG nova.scheduler.client.report [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1858.127032] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776742, 'name': ReconfigVM_Task, 'duration_secs': 0.910059} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.127431] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Reconfigured VM instance instance-0000006c to attach disk [datastore1] d58f5593-aafc-43e0-a040-96af10659b70/d58f5593-aafc-43e0-a040-96af10659b70.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1858.128187] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-16212ecf-b1e2-4c51-8c09-0ed51c482d9a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.134665] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1858.134665] env[62508]: value = "task-1776744" [ 1858.134665] env[62508]: _type = "Task" [ 1858.134665] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.146667] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776744, 'name': Rename_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.147506] env[62508]: DEBUG nova.network.neutron [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Port e0bb9906-0d2a-4bdb-bbe4-5a3074c66499 binding to destination host cpu-1 is already ACTIVE {{(pid=62508) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1858.147734] env[62508]: DEBUG oslo_concurrency.lockutils [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.147901] env[62508]: DEBUG oslo_concurrency.lockutils [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.148075] env[62508]: DEBUG nova.network.neutron [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1858.329656] env[62508]: DEBUG oslo_vmware.api [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776743, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.394950] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.776s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.426592] env[62508]: INFO nova.scheduler.client.report [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleted allocations for instance 42f06dc8-e5f6-475e-ba42-15b4abc3139a [ 1858.646170] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776744, 'name': Rename_Task, 'duration_secs': 0.197212} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.646463] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1858.646710] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb16abe4-6d2d-4827-84fb-0cdaba2c5932 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.650456] env[62508]: DEBUG nova.compute.manager [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1858.655304] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1858.655304] env[62508]: value = "task-1776745" [ 1858.655304] env[62508]: _type = "Task" [ 1858.655304] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.663204] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776745, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.680384] env[62508]: DEBUG nova.virt.hardware [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1858.680643] env[62508]: DEBUG nova.virt.hardware [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1858.680791] env[62508]: DEBUG nova.virt.hardware [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1858.680975] env[62508]: DEBUG nova.virt.hardware [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1858.681139] env[62508]: DEBUG nova.virt.hardware [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1858.681312] env[62508]: DEBUG nova.virt.hardware [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1858.681534] env[62508]: DEBUG nova.virt.hardware [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1858.681694] env[62508]: DEBUG nova.virt.hardware [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1858.681856] env[62508]: DEBUG nova.virt.hardware [None 
req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1858.682030] env[62508]: DEBUG nova.virt.hardware [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1858.682212] env[62508]: DEBUG nova.virt.hardware [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1858.683104] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f48b534-88fe-4e76-a54a-384947cd02ce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.692650] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9953048-fb62-4ad6-aa0f-63b35b4a90a7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.833470] env[62508]: DEBUG oslo_vmware.api [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776743, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.933790] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1cccd280-8bf4-4828-9504-8799affa636d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "42f06dc8-e5f6-475e-ba42-15b4abc3139a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.513s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.971392] env[62508]: DEBUG nova.network.neutron [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updating instance_info_cache with network_info: [{"id": "e0bb9906-0d2a-4bdb-bbe4-5a3074c66499", "address": "fa:16:3e:50:39:a4", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": 
"nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0bb9906-0d", "ovs_interfaceid": "e0bb9906-0d2a-4bdb-bbe4-5a3074c66499", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1859.116659] env[62508]: DEBUG nova.compute.manager [req-4bb1825a-33fb-4389-9075-5f659fd43370 req-a716443e-33df-4f10-b3eb-fa0219118610 service nova] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Received event network-vif-plugged-13de194a-6e4e-4af3-bdef-70dcae738549 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1859.116885] env[62508]: DEBUG oslo_concurrency.lockutils [req-4bb1825a-33fb-4389-9075-5f659fd43370 req-a716443e-33df-4f10-b3eb-fa0219118610 service nova] Acquiring lock "d9341873-6ce8-4410-ae11-768c05c59f64-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1859.117310] env[62508]: DEBUG oslo_concurrency.lockutils [req-4bb1825a-33fb-4389-9075-5f659fd43370 req-a716443e-33df-4f10-b3eb-fa0219118610 service nova] Lock "d9341873-6ce8-4410-ae11-768c05c59f64-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1859.117310] env[62508]: DEBUG oslo_concurrency.lockutils [req-4bb1825a-33fb-4389-9075-5f659fd43370 req-a716443e-33df-4f10-b3eb-fa0219118610 service nova] Lock "d9341873-6ce8-4410-ae11-768c05c59f64-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1859.117432] env[62508]: DEBUG nova.compute.manager [req-4bb1825a-33fb-4389-9075-5f659fd43370 req-a716443e-33df-4f10-b3eb-fa0219118610 service nova] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] No waiting events found dispatching network-vif-plugged-13de194a-6e4e-4af3-bdef-70dcae738549 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1859.117586] env[62508]: WARNING nova.compute.manager [req-4bb1825a-33fb-4389-9075-5f659fd43370 req-a716443e-33df-4f10-b3eb-fa0219118610 service nova] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Received unexpected event network-vif-plugged-13de194a-6e4e-4af3-bdef-70dcae738549 for instance with vm_state building and task_state spawning. [ 1859.166810] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776745, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.217491] env[62508]: DEBUG nova.network.neutron [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Successfully updated port: 13de194a-6e4e-4af3-bdef-70dcae738549 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1859.330136] env[62508]: DEBUG oslo_vmware.api [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776743, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.474341] env[62508]: DEBUG oslo_concurrency.lockutils [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1859.666516] env[62508]: DEBUG oslo_vmware.api [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776745, 'name': PowerOnVM_Task, 'duration_secs': 0.980566} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.666801] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1859.667013] env[62508]: INFO nova.compute.manager [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Took 8.12 seconds to spawn the instance on the hypervisor. 
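The "Acquiring lock refresh_cache-<uuid>" / "Releasing lock" pairs throughout this stretch come from oslo.concurrency's lockutils serializing per-instance network-info cache refreshes. A minimal sketch of that locking pattern, assuming lockutils.lock() as the in-process context manager; fetch_nw_info is a hypothetical stand-in for the Neutron query Nova performs under the lock:

from oslo_concurrency import lockutils

def refresh_cache(instance_uuid, fetch_nw_info):
    # One lock name per instance: every caller refreshing this instance's
    # network info contends on the same name, which is what produces the
    # "waited N.NNNs" / "held N.NNNs" timings seen in the log.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        return fetch_nw_info(instance_uuid)

lockutils.lock() defaults to an in-process lock; passing external=True backs it with a file lock so separate worker processes serialize as well.
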
[ 1859.667206] env[62508]: DEBUG nova.compute.manager [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1859.668080] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3051bf-5bce-438c-bad0-a6a2c059dbe3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.722696] env[62508]: DEBUG oslo_concurrency.lockutils [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "refresh_cache-d9341873-6ce8-4410-ae11-768c05c59f64" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1859.722858] env[62508]: DEBUG oslo_concurrency.lockutils [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "refresh_cache-d9341873-6ce8-4410-ae11-768c05c59f64" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1859.723018] env[62508]: DEBUG nova.network.neutron [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1859.831600] env[62508]: DEBUG oslo_vmware.api [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776743, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.978366] env[62508]: DEBUG nova.compute.manager [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62508) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 1859.978603] env[62508]: DEBUG oslo_concurrency.lockutils [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1859.978836] env[62508]: DEBUG oslo_concurrency.lockutils [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.178237] env[62508]: DEBUG oslo_concurrency.lockutils [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "a89db7c6-b0d9-44c0-8015-8a96f09200f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.178539] env[62508]: DEBUG oslo_concurrency.lockutils [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "a89db7c6-b0d9-44c0-8015-8a96f09200f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.186928] env[62508]: INFO nova.compute.manager [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Took 12.99 seconds to build instance. [ 1860.256023] env[62508]: DEBUG nova.network.neutron [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1860.333850] env[62508]: DEBUG oslo_vmware.api [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776743, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.391447] env[62508]: DEBUG nova.network.neutron [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Updating instance_info_cache with network_info: [{"id": "13de194a-6e4e-4af3-bdef-70dcae738549", "address": "fa:16:3e:0a:7d:11", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13de194a-6e", "ovs_interfaceid": "13de194a-6e4e-4af3-bdef-70dcae738549", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1860.481945] env[62508]: DEBUG nova.objects.instance [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lazy-loading 'migration_context' on Instance uuid 544d165c-5054-4c57-a5d9-ac69046c6fbc {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1860.684939] env[62508]: DEBUG nova.compute.manager [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1860.688444] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4d571e7e-0fe3-492f-95d2-18bc6da3b3d5 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "d58f5593-aafc-43e0-a040-96af10659b70" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.500s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.833160] env[62508]: DEBUG oslo_vmware.api [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776743, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.894423] env[62508]: DEBUG oslo_concurrency.lockutils [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "refresh_cache-d9341873-6ce8-4410-ae11-768c05c59f64" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.894778] env[62508]: DEBUG nova.compute.manager [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Instance network_info: |[{"id": "13de194a-6e4e-4af3-bdef-70dcae738549", "address": "fa:16:3e:0a:7d:11", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13de194a-6e", "ovs_interfaceid": "13de194a-6e4e-4af3-bdef-70dcae738549", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1860.895219] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:7d:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '13de194a-6e4e-4af3-bdef-70dcae738549', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1860.902867] env[62508]: DEBUG oslo.service.loopingcall [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1860.903081] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1860.903314] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5a08b8f5-c881-451f-bbb4-0e6b38596cdb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.927466] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1860.927466] env[62508]: value = "task-1776746" [ 1860.927466] env[62508]: _type = "Task" [ 1860.927466] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.935055] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776746, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.148384] env[62508]: DEBUG nova.compute.manager [req-4705b246-bcbd-4c54-bda0-9353efd6ccc3 req-952db25d-0bb7-41cc-9bfd-c5c3f1168ac4 service nova] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Received event network-changed-13de194a-6e4e-4af3-bdef-70dcae738549 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1861.148384] env[62508]: DEBUG nova.compute.manager [req-4705b246-bcbd-4c54-bda0-9353efd6ccc3 req-952db25d-0bb7-41cc-9bfd-c5c3f1168ac4 service nova] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Refreshing instance network info cache due to event network-changed-13de194a-6e4e-4af3-bdef-70dcae738549. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1861.148601] env[62508]: DEBUG oslo_concurrency.lockutils [req-4705b246-bcbd-4c54-bda0-9353efd6ccc3 req-952db25d-0bb7-41cc-9bfd-c5c3f1168ac4 service nova] Acquiring lock "refresh_cache-d9341873-6ce8-4410-ae11-768c05c59f64" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1861.148659] env[62508]: DEBUG oslo_concurrency.lockutils [req-4705b246-bcbd-4c54-bda0-9353efd6ccc3 req-952db25d-0bb7-41cc-9bfd-c5c3f1168ac4 service nova] Acquired lock "refresh_cache-d9341873-6ce8-4410-ae11-768c05c59f64" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1861.148797] env[62508]: DEBUG nova.network.neutron [req-4705b246-bcbd-4c54-bda0-9353efd6ccc3 req-952db25d-0bb7-41cc-9bfd-c5c3f1168ac4 service nova] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Refreshing network info cache for port 13de194a-6e4e-4af3-bdef-70dcae738549 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1861.154347] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd00446-072a-466c-bb87-b29ce04ce98d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.165363] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92dc761-0584-4451-97e3-cfb1088d734d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.209766] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8681375d-d3ca-455f-a5ef-38738fd03c18 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.221012] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ff72cb-b732-4708-a42c-cbfec62778b6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.227351] env[62508]: DEBUG oslo_concurrency.lockutils [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1861.237801] env[62508]: DEBUG nova.compute.provider_tree [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1861.336220] env[62508]: DEBUG oslo_vmware.api [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776743, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.437614] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776746, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.741982] env[62508]: DEBUG nova.scheduler.client.report [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1861.835616] env[62508]: DEBUG oslo_vmware.api [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776743, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.888111] env[62508]: DEBUG nova.network.neutron [req-4705b246-bcbd-4c54-bda0-9353efd6ccc3 req-952db25d-0bb7-41cc-9bfd-c5c3f1168ac4 service nova] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Updated VIF entry in instance network info cache for port 13de194a-6e4e-4af3-bdef-70dcae738549. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1861.888511] env[62508]: DEBUG nova.network.neutron [req-4705b246-bcbd-4c54-bda0-9353efd6ccc3 req-952db25d-0bb7-41cc-9bfd-c5c3f1168ac4 service nova] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Updating instance_info_cache with network_info: [{"id": "13de194a-6e4e-4af3-bdef-70dcae738549", "address": "fa:16:3e:0a:7d:11", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13de194a-6e", "ovs_interfaceid": "13de194a-6e4e-4af3-bdef-70dcae738549", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1861.938523] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776746, 'name': CreateVM_Task, 'duration_secs': 0.54312} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.938664] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1861.939356] env[62508]: DEBUG oslo_concurrency.lockutils [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1861.939527] env[62508]: DEBUG oslo_concurrency.lockutils [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1861.939931] env[62508]: DEBUG oslo_concurrency.lockutils [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1861.940197] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4c1da4f-3779-4e61-8c24-549f000d7416 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.945361] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1861.945361] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f8039b-4094-f930-25e9-666f5faf293a" [ 1861.945361] env[62508]: _type = "Task" [ 1861.945361] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.953644] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f8039b-4094-f930-25e9-666f5faf293a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.336898] env[62508]: DEBUG oslo_vmware.api [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776743, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.391286] env[62508]: DEBUG oslo_concurrency.lockutils [req-4705b246-bcbd-4c54-bda0-9353efd6ccc3 req-952db25d-0bb7-41cc-9bfd-c5c3f1168ac4 service nova] Releasing lock "refresh_cache-d9341873-6ce8-4410-ae11-768c05c59f64" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1862.391628] env[62508]: DEBUG nova.compute.manager [req-4705b246-bcbd-4c54-bda0-9353efd6ccc3 req-952db25d-0bb7-41cc-9bfd-c5c3f1168ac4 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Received event network-changed-8fad22e1-6bfd-45d7-89b1-d953be11abbc {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1862.391807] env[62508]: DEBUG nova.compute.manager [req-4705b246-bcbd-4c54-bda0-9353efd6ccc3 req-952db25d-0bb7-41cc-9bfd-c5c3f1168ac4 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Refreshing instance network info cache due to event network-changed-8fad22e1-6bfd-45d7-89b1-d953be11abbc. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1862.392030] env[62508]: DEBUG oslo_concurrency.lockutils [req-4705b246-bcbd-4c54-bda0-9353efd6ccc3 req-952db25d-0bb7-41cc-9bfd-c5c3f1168ac4 service nova] Acquiring lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1862.392182] env[62508]: DEBUG oslo_concurrency.lockutils [req-4705b246-bcbd-4c54-bda0-9353efd6ccc3 req-952db25d-0bb7-41cc-9bfd-c5c3f1168ac4 service nova] Acquired lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1862.392428] env[62508]: DEBUG nova.network.neutron [req-4705b246-bcbd-4c54-bda0-9353efd6ccc3 req-952db25d-0bb7-41cc-9bfd-c5c3f1168ac4 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Refreshing network info cache for port 8fad22e1-6bfd-45d7-89b1-d953be11abbc {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1862.456105] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f8039b-4094-f930-25e9-666f5faf293a, 'name': SearchDatastore_Task, 'duration_secs': 0.010521} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.456370] env[62508]: DEBUG oslo_concurrency.lockutils [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1862.456610] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1862.456844] env[62508]: DEBUG oslo_concurrency.lockutils [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1862.456992] env[62508]: DEBUG oslo_concurrency.lockutils [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1862.457192] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1862.457459] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84ce91b3-e9fe-43de-a7c5-0dbe5763d81a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.468617] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1862.468794] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1862.469574] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9178120-9b66-4556-8681-adffd1d9966b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.475136] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1862.475136] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5252a2ba-7892-8195-f18f-5b9f0aa6b249" [ 1862.475136] env[62508]: _type = "Task" [ 1862.475136] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.482608] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5252a2ba-7892-8195-f18f-5b9f0aa6b249, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.754678] env[62508]: DEBUG oslo_concurrency.lockutils [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.776s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.760663] env[62508]: DEBUG oslo_concurrency.lockutils [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.533s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1862.762302] env[62508]: INFO nova.compute.claims [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1862.836910] env[62508]: DEBUG oslo_vmware.api [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776743, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.987163] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5252a2ba-7892-8195-f18f-5b9f0aa6b249, 'name': SearchDatastore_Task, 'duration_secs': 0.040041} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.987952] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c61351b-90ec-47d5-b374-3780570d24c5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.993887] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1862.993887] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52caf492-ec39-c159-d6e3-f6f3a95051a3" [ 1862.993887] env[62508]: _type = "Task" [ 1862.993887] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.001824] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52caf492-ec39-c159-d6e3-f6f3a95051a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.120553] env[62508]: DEBUG nova.network.neutron [req-4705b246-bcbd-4c54-bda0-9353efd6ccc3 req-952db25d-0bb7-41cc-9bfd-c5c3f1168ac4 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Updated VIF entry in instance network info cache for port 8fad22e1-6bfd-45d7-89b1-d953be11abbc. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1863.120934] env[62508]: DEBUG nova.network.neutron [req-4705b246-bcbd-4c54-bda0-9353efd6ccc3 req-952db25d-0bb7-41cc-9bfd-c5c3f1168ac4 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Updating instance_info_cache with network_info: [{"id": "8fad22e1-6bfd-45d7-89b1-d953be11abbc", "address": "fa:16:3e:6e:f6:0b", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fad22e1-6b", "ovs_interfaceid": "8fad22e1-6bfd-45d7-89b1-d953be11abbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1863.337557] env[62508]: DEBUG oslo_vmware.api [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776743, 
'name': ReconfigVM_Task} progress is 18%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.454443] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "d3455694-a157-404f-8153-a9f96bac49a2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1863.454719] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "d3455694-a157-404f-8153-a9f96bac49a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1863.454923] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "d3455694-a157-404f-8153-a9f96bac49a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1863.455123] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "d3455694-a157-404f-8153-a9f96bac49a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1863.455297] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "d3455694-a157-404f-8153-a9f96bac49a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1863.457474] env[62508]: INFO nova.compute.manager [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Terminating instance [ 1863.459333] env[62508]: DEBUG nova.compute.manager [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1863.459555] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1863.459796] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5fb0cdbe-ed87-42f0-95e3-106228bd685e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.467552] env[62508]: DEBUG oslo_vmware.api [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1863.467552] env[62508]: value = "task-1776747" [ 1863.467552] env[62508]: _type = "Task" [ 1863.467552] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.476156] env[62508]: DEBUG oslo_vmware.api [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776747, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.503784] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52caf492-ec39-c159-d6e3-f6f3a95051a3, 'name': SearchDatastore_Task, 'duration_secs': 0.040279} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.504039] env[62508]: DEBUG oslo_concurrency.lockutils [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1863.504299] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] d9341873-6ce8-4410-ae11-768c05c59f64/d9341873-6ce8-4410-ae11-768c05c59f64.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1863.504559] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-78034e5f-4eb4-42d8-acd1-27d1a2265260 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.510874] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1863.510874] env[62508]: value = "task-1776748" [ 1863.510874] env[62508]: _type = "Task" [ 1863.510874] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.518247] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776748, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.623944] env[62508]: DEBUG oslo_concurrency.lockutils [req-4705b246-bcbd-4c54-bda0-9353efd6ccc3 req-952db25d-0bb7-41cc-9bfd-c5c3f1168ac4 service nova] Releasing lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1863.840923] env[62508]: DEBUG oslo_vmware.api [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776743, 'name': ReconfigVM_Task, 'duration_secs': 5.833877} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.841319] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1863.841642] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Reconfigured VM to detach interface {{(pid=62508) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1863.973970] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe277645-02d3-4f5f-8e44-c4dc14128c9e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.985112] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316ed087-39d5-4526-924d-18973bf99de9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.989443] env[62508]: DEBUG oslo_vmware.api [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776747, 'name': PowerOffVM_Task, 'duration_secs': 0.18028} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.989935] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1863.990212] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Volume detach. 
Driver type: vmdk {{(pid=62508) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1863.990421] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368806', 'volume_id': '3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2', 'name': 'volume-3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'd3455694-a157-404f-8153-a9f96bac49a2', 'attached_at': '2024-12-11T22:18:02.000000', 'detached_at': '', 'volume_id': '3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2', 'serial': '3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1863.991794] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8185bc0-cb6e-4e7f-85c5-1e53c760e29c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.026569] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfddd48d-30e4-4ff3-bf49-e12a9586072a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.045108] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ae8e4a-2b10-486b-bc1c-df81439f6d10 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.053284] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776748, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523646} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.054022] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] d9341873-6ce8-4410-ae11-768c05c59f64/d9341873-6ce8-4410-ae11-768c05c59f64.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1864.054242] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1864.055646] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e244cac-2b7f-4b4c-a883-34454a7b3e58 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.060760] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a275d4e7-caa2-4ccc-a3bf-378e7d43b977 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.063593] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7f06e1-9b41-4df6-9b81-87c54d5bc547 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.073887] env[62508]: DEBUG nova.compute.provider_tree [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1864.077878] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1864.077878] env[62508]: value = "task-1776749" [ 1864.077878] env[62508]: _type = "Task" [ 1864.077878] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.095418] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d980e8d0-56fa-4eef-9a7c-bbf9185f1aa1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.111130] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] The volume has not been displaced from its original location: [datastore1] volume-3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2/volume-3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2.vmdk. No consolidation needed. 
{{(pid=62508) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1864.116444] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Reconfiguring VM instance instance-00000065 to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1864.120133] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7d53622-f29e-4be0-83f3-648b820bfdc4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.133898] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776749, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.139235] env[62508]: DEBUG oslo_vmware.api [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1864.139235] env[62508]: value = "task-1776750" [ 1864.139235] env[62508]: _type = "Task" [ 1864.139235] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.147047] env[62508]: DEBUG oslo_vmware.api [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776750, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.298984] env[62508]: INFO nova.compute.manager [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Swapping old allocation on dict_keys(['5d5b4923-a8ac-4688-9f86-2405bd3406a9']) held by migration 80926dc1-cf00-4cb3-84ef-5ec854d14f1f for instance [ 1864.326203] env[62508]: DEBUG nova.scheduler.client.report [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Overwriting current allocation {'allocations': {'5d5b4923-a8ac-4688-9f86-2405bd3406a9': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 174}}, 'project_id': 'ce0dd059301e41abb3758625d38e435e', 'user_id': '2561183ef9c54615988c33906fc5f84e', 'consumer_generation': 1} on consumer 544d165c-5054-4c57-a5d9-ac69046c6fbc {{(pid=62508) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1864.415894] env[62508]: DEBUG oslo_concurrency.lockutils [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1864.416104] env[62508]: DEBUG oslo_concurrency.lockutils [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1864.416276] env[62508]: DEBUG nova.network.neutron [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1864.577263] env[62508]: DEBUG nova.scheduler.client.report [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1864.604482] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776749, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077681} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.604840] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1864.605996] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82af5c2d-d186-45c6-9b8e-688cbb146b53 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.643168] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] d9341873-6ce8-4410-ae11-768c05c59f64/d9341873-6ce8-4410-ae11-768c05c59f64.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1864.643652] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22dfcfe3-18d9-43d9-9bf7-4a925fd644b1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.682105] env[62508]: DEBUG oslo_vmware.api [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776750, 'name': ReconfigVM_Task, 'duration_secs': 0.186512} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.683735] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Reconfigured VM instance instance-00000065 to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1864.690794] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1864.690794] env[62508]: value = "task-1776751" [ 1864.690794] env[62508]: _type = "Task" [ 1864.690794] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.691548] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28394cc4-fc4a-4d6d-a7da-7d1c48b5e767 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.708395] env[62508]: DEBUG oslo_vmware.api [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1864.708395] env[62508]: value = "task-1776752" [ 1864.708395] env[62508]: _type = "Task" [ 1864.708395] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.711534] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776751, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.719069] env[62508]: DEBUG oslo_vmware.api [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776752, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.083105] env[62508]: DEBUG oslo_concurrency.lockutils [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.322s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.083647] env[62508]: DEBUG nova.compute.manager [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1865.134794] env[62508]: DEBUG nova.network.neutron [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updating instance_info_cache with network_info: [{"id": "e0bb9906-0d2a-4bdb-bbe4-5a3074c66499", "address": "fa:16:3e:50:39:a4", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0bb9906-0d", "ovs_interfaceid": "e0bb9906-0d2a-4bdb-bbe4-5a3074c66499", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1865.175656] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1865.175858] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquired lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1865.176091] env[62508]: DEBUG nova.network.neutron [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1865.210938] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776751, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.219110] env[62508]: DEBUG oslo_vmware.api [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776752, 'name': ReconfigVM_Task, 'duration_secs': 0.474646} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.219451] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368806', 'volume_id': '3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2', 'name': 'volume-3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'd3455694-a157-404f-8153-a9f96bac49a2', 'attached_at': '2024-12-11T22:18:02.000000', 'detached_at': '', 'volume_id': '3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2', 'serial': '3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1865.219755] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1865.220577] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c14185b1-03d5-4390-9597-f8752debde85 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.227470] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1865.227746] env[62508]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.UnregisterVM with opID=oslo.vmware-b78f49d0-7c6f-4eef-aa6c-e016133aac9d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.307214] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1865.307448] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1865.307631] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Deleting the datastore file [datastore1] d3455694-a157-404f-8153-a9f96bac49a2 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1865.307898] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e08e7399-5989-4c5b-9ea6-da2dfd1c52cd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.313598] env[62508]: DEBUG oslo_vmware.api [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1865.313598] env[62508]: value = "task-1776754" [ 1865.313598] env[62508]: _type = "Task" [ 1865.313598] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.321585] env[62508]: DEBUG oslo_vmware.api [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776754, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.588598] env[62508]: DEBUG nova.compute.utils [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1865.590161] env[62508]: DEBUG nova.compute.manager [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1865.590334] env[62508]: DEBUG nova.network.neutron [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1865.638103] env[62508]: DEBUG oslo_concurrency.lockutils [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "refresh_cache-544d165c-5054-4c57-a5d9-ac69046c6fbc" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1865.638564] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1865.638845] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cda1485c-a939-45f9-82e1-dd9755dd13f5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.645947] env[62508]: DEBUG oslo_vmware.api [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1865.645947] env[62508]: value = "task-1776755" [ 1865.645947] env[62508]: _type = "Task" [ 1865.645947] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.653969] env[62508]: DEBUG oslo_vmware.api [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776755, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.659944] env[62508]: DEBUG nova.policy [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81273f5ad53746e2bc89a7f2f7b7a727', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86e6f83751b0446fb8f00684082f018a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1865.669443] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "aa7c5176-4420-44b1-9fea-6db7561492c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.669691] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "aa7c5176-4420-44b1-9fea-6db7561492c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.669902] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "aa7c5176-4420-44b1-9fea-6db7561492c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.670106] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "aa7c5176-4420-44b1-9fea-6db7561492c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.670280] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "aa7c5176-4420-44b1-9fea-6db7561492c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.672670] env[62508]: INFO nova.compute.manager [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Terminating instance [ 1865.674451] env[62508]: DEBUG nova.compute.manager [None req-46734f14-96d7-4983-bffc-6f009867b4c2 
tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1865.674646] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1865.675478] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45714118-71a0-4a03-8e01-75ba22672f62 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.685661] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1865.685946] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-35815e44-04c9-458b-a77d-92ebbd9cf999 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.693530] env[62508]: DEBUG oslo_vmware.api [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1865.693530] env[62508]: value = "task-1776756" [ 1865.693530] env[62508]: _type = "Task" [ 1865.693530] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.702393] env[62508]: DEBUG oslo_vmware.api [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776756, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.711202] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776751, 'name': ReconfigVM_Task, 'duration_secs': 0.670435} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.711525] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Reconfigured VM instance instance-0000006d to attach disk [datastore1] d9341873-6ce8-4410-ae11-768c05c59f64/d9341873-6ce8-4410-ae11-768c05c59f64.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1865.712218] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-43b3a85a-bf31-49a3-a361-d070ea0a6cc3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.718378] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1865.718378] env[62508]: value = "task-1776757" [ 1865.718378] env[62508]: _type = "Task" [ 1865.718378] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.726601] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776757, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.826349] env[62508]: DEBUG oslo_vmware.api [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776754, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.306265} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.826717] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1865.826931] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1865.827130] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1865.827330] env[62508]: INFO nova.compute.manager [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Took 2.37 seconds to destroy the instance on the hypervisor. 
[ 1865.827580] env[62508]: DEBUG oslo.service.loopingcall [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1865.830181] env[62508]: DEBUG nova.compute.manager [-] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1865.830309] env[62508]: DEBUG nova.network.neutron [-] [instance: d3455694-a157-404f-8153-a9f96bac49a2] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1866.070543] env[62508]: INFO nova.network.neutron [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Port 7f4a8a8e-266d-408a-b48e-2a6ecb65e47b from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1866.071383] env[62508]: DEBUG nova.network.neutron [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Updating instance_info_cache with network_info: [{"id": "0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3", "address": "fa:16:3e:62:2e:ea", "network": {"id": "8e80b270-1a6d-45fe-9a8b-355e686378f1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1174464703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b46df14344794f29a8b0c00408d18159", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e5955b6-9b", "ovs_interfaceid": "0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1866.093827] env[62508]: DEBUG nova.compute.manager [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1866.120705] env[62508]: DEBUG nova.network.neutron [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Successfully created port: 420ade26-2a3d-41e8-8bcd-3dd540bb74d6 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1866.156643] env[62508]: DEBUG oslo_vmware.api [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776755, 'name': PowerOffVM_Task, 'duration_secs': 0.209244} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.156904] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1866.157571] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1866.157779] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1866.157932] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1866.158218] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1866.158407] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1866.158587] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1866.158870] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1866.160946] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1866.160946] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1866.160946] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1866.160946] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1866.165369] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49c7dd4b-6255-4d64-b140-411ec47ae65b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.183601] env[62508]: DEBUG oslo_vmware.api [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1866.183601] env[62508]: value = "task-1776758" [ 1866.183601] env[62508]: _type = "Task" [ 1866.183601] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.192321] env[62508]: DEBUG oslo_vmware.api [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776758, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.202222] env[62508]: DEBUG oslo_vmware.api [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776756, 'name': PowerOffVM_Task, 'duration_secs': 0.273471} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.202526] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1866.202834] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1866.203128] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d51d540-a646-4f6a-ae9b-d5658e69ed64 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.227869] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776757, 'name': Rename_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.286893] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1866.287192] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1866.287397] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Deleting the datastore file [datastore1] aa7c5176-4420-44b1-9fea-6db7561492c7 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1866.288683] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-185f6c43-008f-427d-8431-8d2a8f8927a7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.293140] env[62508]: DEBUG nova.compute.manager [req-2b6f820e-021b-41f3-b1b9-8b054f073cae req-5d426019-1a3a-49e2-9af6-01d06f145b43 service nova] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Received event network-vif-deleted-ed5b1d50-d456-43d1-887a-96dcb4f42cec {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1866.293405] env[62508]: INFO nova.compute.manager [req-2b6f820e-021b-41f3-b1b9-8b054f073cae req-5d426019-1a3a-49e2-9af6-01d06f145b43 service nova] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Neutron deleted interface ed5b1d50-d456-43d1-887a-96dcb4f42cec; detaching it from the instance and deleting it from the info cache [ 1866.293664] 
env[62508]: DEBUG nova.network.neutron [req-2b6f820e-021b-41f3-b1b9-8b054f073cae req-5d426019-1a3a-49e2-9af6-01d06f145b43 service nova] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1866.302356] env[62508]: DEBUG oslo_vmware.api [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1866.302356] env[62508]: value = "task-1776760" [ 1866.302356] env[62508]: _type = "Task" [ 1866.302356] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.314157] env[62508]: DEBUG oslo_vmware.api [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776760, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.573694] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Releasing lock "refresh_cache-aa7c5176-4420-44b1-9fea-6db7561492c7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.700022] env[62508]: DEBUG oslo_vmware.api [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776758, 'name': ReconfigVM_Task, 'duration_secs': 0.15978} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.700022] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbaa2d93-4720-4504-87b0-d69de5167501 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.729600] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1866.730082] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1866.730396] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1866.730748] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1866.731044] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1866.731334] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1866.731747] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1866.732077] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1866.732417] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1866.732742] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1866.733092] env[62508]: DEBUG nova.virt.hardware [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1866.740893] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ac866b4-f632-4506-8c3a-7e8c4d476259 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.753119] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776757, 'name': Rename_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.755551] env[62508]: DEBUG oslo_vmware.api [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1866.755551] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524ac347-be0b-63ab-6f66-414b130a2737" [ 1866.755551] env[62508]: _type = "Task" [ 1866.755551] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.760874] env[62508]: DEBUG nova.network.neutron [-] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1866.769756] env[62508]: DEBUG oslo_vmware.api [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524ac347-be0b-63ab-6f66-414b130a2737, 'name': SearchDatastore_Task, 'duration_secs': 0.011558} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.783751] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Reconfiguring VM instance instance-00000064 to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1866.785069] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac70b33f-9720-437c-aa76-bab423605067 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.810348] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-569b06ce-4974-4619-9450-f5892a850904 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.826874] env[62508]: DEBUG oslo_vmware.api [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776760, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.426187} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.830424] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1866.830779] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1866.831151] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1866.831502] env[62508]: INFO nova.compute.manager [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1866.831918] env[62508]: DEBUG oslo.service.loopingcall [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1866.836018] env[62508]: DEBUG oslo_vmware.api [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1866.836018] env[62508]: value = "task-1776761" [ 1866.836018] env[62508]: _type = "Task" [ 1866.836018] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.836018] env[62508]: DEBUG nova.compute.manager [-] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1866.836018] env[62508]: DEBUG nova.network.neutron [-] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1866.841358] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-427aef61-81a4-4399-8992-30c3271eb2a5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.861940] env[62508]: DEBUG oslo_vmware.api [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776761, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.879754] env[62508]: DEBUG nova.compute.manager [req-2b6f820e-021b-41f3-b1b9-8b054f073cae req-5d426019-1a3a-49e2-9af6-01d06f145b43 service nova] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Detach interface failed, port_id=ed5b1d50-d456-43d1-887a-96dcb4f42cec, reason: Instance d3455694-a157-404f-8153-a9f96bac49a2 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1867.078149] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fc662deb-4065-4813-9b39-5116ddd25a5f tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "interface-aa7c5176-4420-44b1-9fea-6db7561492c7-7f4a8a8e-266d-408a-b48e-2a6ecb65e47b" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.835s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.103998] env[62508]: DEBUG nova.compute.manager [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1867.135089] env[62508]: DEBUG nova.virt.hardware [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1867.135391] env[62508]: DEBUG nova.virt.hardware [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1867.135558] env[62508]: DEBUG nova.virt.hardware [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1867.135751] env[62508]: DEBUG nova.virt.hardware [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1867.135896] env[62508]: DEBUG nova.virt.hardware [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1867.136102] env[62508]: DEBUG nova.virt.hardware [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1867.136367] env[62508]: DEBUG nova.virt.hardware [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1867.136569] env[62508]: DEBUG nova.virt.hardware [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1867.136749] env[62508]: DEBUG nova.virt.hardware [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 
tempest-ServersTestJSON-1349594885-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1867.136915] env[62508]: DEBUG nova.virt.hardware [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1867.137100] env[62508]: DEBUG nova.virt.hardware [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1867.138016] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7340b7a8-073d-4743-bbcc-f787949df81a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.146573] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd3a79d-7f34-4cad-a8a2-dfdf0a5d20d9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.238052] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776757, 'name': Rename_Task, 'duration_secs': 1.141613} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.238338] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1867.238592] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1085b033-ffd3-4d95-978b-e4011ccb38fb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.245067] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1867.245067] env[62508]: value = "task-1776762" [ 1867.245067] env[62508]: _type = "Task" [ 1867.245067] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.254053] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776762, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.263878] env[62508]: INFO nova.compute.manager [-] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Took 1.43 seconds to deallocate network for instance. 
[ 1867.346735] env[62508]: DEBUG oslo_vmware.api [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776761, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.603176] env[62508]: DEBUG nova.compute.manager [req-58183159-8146-41e3-9f28-22843cd267ca req-ededf8f2-70e1-4227-81ad-fad35b5613eb service nova] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Received event network-vif-plugged-420ade26-2a3d-41e8-8bcd-3dd540bb74d6 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1867.603405] env[62508]: DEBUG oslo_concurrency.lockutils [req-58183159-8146-41e3-9f28-22843cd267ca req-ededf8f2-70e1-4227-81ad-fad35b5613eb service nova] Acquiring lock "a89db7c6-b0d9-44c0-8015-8a96f09200f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.603616] env[62508]: DEBUG oslo_concurrency.lockutils [req-58183159-8146-41e3-9f28-22843cd267ca req-ededf8f2-70e1-4227-81ad-fad35b5613eb service nova] Lock "a89db7c6-b0d9-44c0-8015-8a96f09200f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.603788] env[62508]: DEBUG oslo_concurrency.lockutils [req-58183159-8146-41e3-9f28-22843cd267ca req-ededf8f2-70e1-4227-81ad-fad35b5613eb service nova] Lock "a89db7c6-b0d9-44c0-8015-8a96f09200f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.603954] env[62508]: DEBUG nova.compute.manager [req-58183159-8146-41e3-9f28-22843cd267ca req-ededf8f2-70e1-4227-81ad-fad35b5613eb service nova] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] No waiting events found dispatching network-vif-plugged-420ade26-2a3d-41e8-8bcd-3dd540bb74d6 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1867.606572] env[62508]: WARNING nova.compute.manager [req-58183159-8146-41e3-9f28-22843cd267ca req-ededf8f2-70e1-4227-81ad-fad35b5613eb service nova] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Received unexpected event network-vif-plugged-420ade26-2a3d-41e8-8bcd-3dd540bb74d6 for instance with vm_state building and task_state spawning. 
[ 1867.656815] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.657449] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.696619] env[62508]: DEBUG nova.network.neutron [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Successfully updated port: 420ade26-2a3d-41e8-8bcd-3dd540bb74d6 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1867.755985] env[62508]: DEBUG oslo_vmware.api [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776762, 'name': PowerOnVM_Task, 'duration_secs': 0.492233} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.756306] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1867.756564] env[62508]: INFO nova.compute.manager [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Took 9.11 seconds to spawn the instance on the hypervisor. [ 1867.756830] env[62508]: DEBUG nova.compute.manager [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1867.757820] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81400765-ed36-40ea-b660-bc0da85e6a07 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.808257] env[62508]: INFO nova.compute.manager [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Took 0.54 seconds to detach 1 volumes for instance. 
[ 1867.810566] env[62508]: DEBUG nova.compute.manager [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Deleting volume: 3ce8f8cc-9dab-4bfd-8d92-7a1fbd8ce3c2 {{(pid=62508) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1867.858704] env[62508]: DEBUG oslo_vmware.api [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776761, 'name': ReconfigVM_Task, 'duration_secs': 0.713367} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.858982] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Reconfigured VM instance instance-00000064 to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1867.864211] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87263f8-eca3-482f-9264-9b29a225ec17 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.895073] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 544d165c-5054-4c57-a5d9-ac69046c6fbc/544d165c-5054-4c57-a5d9-ac69046c6fbc.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1867.895658] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87d6b723-9d44-46f9-987a-e809f44bf4b8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.914999] env[62508]: DEBUG oslo_vmware.api [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1867.914999] env[62508]: value = "task-1776764" [ 1867.914999] env[62508]: _type = "Task" [ 1867.914999] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.923291] env[62508]: DEBUG oslo_vmware.api [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776764, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.109686] env[62508]: DEBUG nova.network.neutron [-] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.160874] env[62508]: DEBUG nova.compute.utils [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1868.199967] env[62508]: DEBUG oslo_concurrency.lockutils [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "refresh_cache-a89db7c6-b0d9-44c0-8015-8a96f09200f6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1868.200186] env[62508]: DEBUG oslo_concurrency.lockutils [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "refresh_cache-a89db7c6-b0d9-44c0-8015-8a96f09200f6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1868.200301] env[62508]: DEBUG nova.network.neutron [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1868.276900] env[62508]: INFO nova.compute.manager [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Took 14.41 seconds to build instance. 
[ 1868.324270] env[62508]: DEBUG nova.compute.manager [req-f2d6c936-779f-454e-baae-e746433e72ab req-e74dc491-633c-464c-8b7d-f4caadd5ed28 service nova] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Received event network-vif-deleted-0e5955b6-9bc6-41ad-a45a-08a8f6b5afa3 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1868.354937] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.355193] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.355441] env[62508]: DEBUG nova.objects.instance [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lazy-loading 'resources' on Instance uuid d3455694-a157-404f-8153-a9f96bac49a2 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1868.424610] env[62508]: DEBUG oslo_vmware.api [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776764, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.603748] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1f6109-00c9-4bab-81bc-011bd1e4f9be {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.610602] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4b5c73f9-ffd6-4bd7-befd-776b055ec6a1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Suspending the VM {{(pid=62508) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1868.610835] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-7bdba979-4ac6-4c0b-8529-70a30168d72f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.612675] env[62508]: INFO nova.compute.manager [-] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Took 1.78 seconds to deallocate network for instance. [ 1868.620447] env[62508]: DEBUG oslo_vmware.api [None req-4b5c73f9-ffd6-4bd7-befd-776b055ec6a1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1868.620447] env[62508]: value = "task-1776765" [ 1868.620447] env[62508]: _type = "Task" [ 1868.620447] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.629529] env[62508]: DEBUG oslo_vmware.api [None req-4b5c73f9-ffd6-4bd7-befd-776b055ec6a1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776765, 'name': SuspendVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.664108] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.737021] env[62508]: DEBUG nova.network.neutron [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1868.778726] env[62508]: DEBUG oslo_concurrency.lockutils [None req-de9f1dec-8195-4178-bbbf-298c5cf32674 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "d9341873-6ce8-4410-ae11-768c05c59f64" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.924s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.894630] env[62508]: DEBUG nova.network.neutron [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Updating instance_info_cache with network_info: [{"id": "420ade26-2a3d-41e8-8bcd-3dd540bb74d6", "address": "fa:16:3e:f1:f0:bc", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap420ade26-2a", "ovs_interfaceid": "420ade26-2a3d-41e8-8bcd-3dd540bb74d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.929547] env[62508]: DEBUG oslo_vmware.api [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776764, 'name': ReconfigVM_Task, 
'duration_secs': 0.764028} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.932276] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 544d165c-5054-4c57-a5d9-ac69046c6fbc/544d165c-5054-4c57-a5d9-ac69046c6fbc.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1868.933639] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0df04b8-6b31-49c6-a6f7-c0192299cce1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.954970] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d149bada-aca8-41d7-a6ca-09f77e2edf28 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.976743] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb58e255-b3f1-4613-8b61-18d0e9a0d1cc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.998635] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e42f3f7-7040-4555-9e86-9a69e22fb3d3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.006120] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1869.006406] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d0924859-3099-4f59-b0aa-aa856af54a12 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.015262] env[62508]: DEBUG oslo_vmware.api [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1869.015262] env[62508]: value = "task-1776766" [ 1869.015262] env[62508]: _type = "Task" [ 1869.015262] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.023405] env[62508]: DEBUG oslo_vmware.api [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776766, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.062785] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25ba7b66-b771-46c9-9f2a-3a307bb5b989 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.070202] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c59365-cb7e-4ee6-9298-681666e1f840 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.102091] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edca1a28-a57b-4d6b-a883-867ca45220ef {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.109822] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e3995c2-4f74-475a-9f04-46bf3dede589 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.124096] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.124647] env[62508]: DEBUG nova.compute.provider_tree [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1869.133941] env[62508]: DEBUG oslo_vmware.api [None req-4b5c73f9-ffd6-4bd7-befd-776b055ec6a1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776765, 'name': SuspendVM_Task} progress is 54%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.397394] env[62508]: DEBUG oslo_concurrency.lockutils [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "refresh_cache-a89db7c6-b0d9-44c0-8015-8a96f09200f6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1869.398381] env[62508]: DEBUG nova.compute.manager [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Instance network_info: |[{"id": "420ade26-2a3d-41e8-8bcd-3dd540bb74d6", "address": "fa:16:3e:f1:f0:bc", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap420ade26-2a", "ovs_interfaceid": "420ade26-2a3d-41e8-8bcd-3dd540bb74d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1869.398838] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:f0:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec46b14d-3310-4f2b-96c1-f53ee47d3759', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '420ade26-2a3d-41e8-8bcd-3dd540bb74d6', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1869.407076] env[62508]: DEBUG oslo.service.loopingcall [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1869.407329] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1869.407565] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b052a9a-e066-4122-8935-8a4bbbdf8d21 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.428763] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1869.428763] env[62508]: value = "task-1776767" [ 1869.428763] env[62508]: _type = "Task" [ 1869.428763] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.441199] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776767, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.525157] env[62508]: DEBUG oslo_vmware.api [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776766, 'name': PowerOnVM_Task, 'duration_secs': 0.386415} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.525429] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1869.630936] env[62508]: DEBUG nova.scheduler.client.report [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1869.635059] env[62508]: DEBUG nova.compute.manager [req-27270540-88bd-448f-ad00-4b5b91f75e76 req-a8e78f18-836f-4202-b0ba-b5d7463bd42e service nova] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Received event network-changed-420ade26-2a3d-41e8-8bcd-3dd540bb74d6 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1869.635260] env[62508]: DEBUG nova.compute.manager [req-27270540-88bd-448f-ad00-4b5b91f75e76 req-a8e78f18-836f-4202-b0ba-b5d7463bd42e service nova] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Refreshing instance network info cache due to event network-changed-420ade26-2a3d-41e8-8bcd-3dd540bb74d6. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1869.635476] env[62508]: DEBUG oslo_concurrency.lockutils [req-27270540-88bd-448f-ad00-4b5b91f75e76 req-a8e78f18-836f-4202-b0ba-b5d7463bd42e service nova] Acquiring lock "refresh_cache-a89db7c6-b0d9-44c0-8015-8a96f09200f6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.635622] env[62508]: DEBUG oslo_concurrency.lockutils [req-27270540-88bd-448f-ad00-4b5b91f75e76 req-a8e78f18-836f-4202-b0ba-b5d7463bd42e service nova] Acquired lock "refresh_cache-a89db7c6-b0d9-44c0-8015-8a96f09200f6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.635786] env[62508]: DEBUG nova.network.neutron [req-27270540-88bd-448f-ad00-4b5b91f75e76 req-a8e78f18-836f-4202-b0ba-b5d7463bd42e service nova] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Refreshing network info cache for port 420ade26-2a3d-41e8-8bcd-3dd540bb74d6 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1869.642857] env[62508]: DEBUG oslo_vmware.api [None req-4b5c73f9-ffd6-4bd7-befd-776b055ec6a1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776765, 'name': SuspendVM_Task, 'duration_secs': 0.708783} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.643119] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4b5c73f9-ffd6-4bd7-befd-776b055ec6a1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Suspended the VM {{(pid=62508) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1869.643300] env[62508]: DEBUG nova.compute.manager [None req-4b5c73f9-ffd6-4bd7-befd-776b055ec6a1 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1869.644577] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e012fe-fbab-48d5-afba-da6f91fc03e0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.736500] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.736766] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.737019] env[62508]: INFO nova.compute.manager [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 
d3829b04-6d1f-44f0-8b94-30b582506ed4] Attaching volume 464c4a86-7294-4ef5-8433-0176e235cc8b to /dev/sdb [ 1869.768185] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89785f81-47a2-40a9-a003-4c94fac704ba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.777145] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846414ab-2d08-4654-bbf5-9e0f1e32dce4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.791764] env[62508]: DEBUG nova.virt.block_device [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Updating existing volume attachment record: 4cfbb56d-c202-418c-b975-937be91e6ede {{(pid=62508) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1869.937904] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776767, 'name': CreateVM_Task, 'duration_secs': 0.464816} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.938113] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1869.938792] env[62508]: DEBUG oslo_concurrency.lockutils [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.938961] env[62508]: DEBUG oslo_concurrency.lockutils [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.939308] env[62508]: DEBUG oslo_concurrency.lockutils [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1869.939613] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aeeeb060-c8b9-4ddb-9d71-fbbd6b4c4d8c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.944199] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1869.944199] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ac9220-79de-985c-93d0-3c908dfd0654" [ 1869.944199] env[62508]: _type = "Task" [ 1869.944199] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.953944] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ac9220-79de-985c-93d0-3c908dfd0654, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.138231] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.783s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.142604] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.019s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.142880] env[62508]: DEBUG nova.objects.instance [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lazy-loading 'resources' on Instance uuid aa7c5176-4420-44b1-9fea-6db7561492c7 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1870.164928] env[62508]: INFO nova.scheduler.client.report [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Deleted allocations for instance d3455694-a157-404f-8153-a9f96bac49a2 [ 1870.371570] env[62508]: DEBUG nova.network.neutron [req-27270540-88bd-448f-ad00-4b5b91f75e76 req-a8e78f18-836f-4202-b0ba-b5d7463bd42e service nova] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Updated VIF entry in instance network info cache for port 420ade26-2a3d-41e8-8bcd-3dd540bb74d6. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1870.371980] env[62508]: DEBUG nova.network.neutron [req-27270540-88bd-448f-ad00-4b5b91f75e76 req-a8e78f18-836f-4202-b0ba-b5d7463bd42e service nova] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Updating instance_info_cache with network_info: [{"id": "420ade26-2a3d-41e8-8bcd-3dd540bb74d6", "address": "fa:16:3e:f1:f0:bc", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap420ade26-2a", "ovs_interfaceid": "420ade26-2a3d-41e8-8bcd-3dd540bb74d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.456705] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ac9220-79de-985c-93d0-3c908dfd0654, 'name': SearchDatastore_Task, 'duration_secs': 0.011776} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.457578] env[62508]: DEBUG oslo_concurrency.lockutils [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.457578] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1870.457578] env[62508]: DEBUG oslo_concurrency.lockutils [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.458055] env[62508]: DEBUG oslo_concurrency.lockutils [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.458055] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1870.458162] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-633b94c6-9881-44a2-aca6-02faba512440 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.467015] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1870.467243] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1870.468031] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f3d6bcb-d981-417a-8a78-445db6717be1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.474064] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1870.474064] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d61dc7-8746-9e4b-b80c-a75c10e5e825" [ 1870.474064] env[62508]: _type = "Task" [ 1870.474064] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.481067] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d61dc7-8746-9e4b-b80c-a75c10e5e825, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.536655] env[62508]: INFO nova.compute.manager [None req-70789e3e-9d9c-4f8b-948d-320e8d27b456 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updating instance to original state: 'active' [ 1870.672704] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e1a80b48-eeb8-4d71-a5b7-b98f1d5ba8b8 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "d3455694-a157-404f-8153-a9f96bac49a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.218s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.790312] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9588f0de-6bcf-4239-a23b-59329e15bb2f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.797953] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f88266-db13-4c7d-a22b-7841200c6dbc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.827791] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0141a246-be2b-4756-8c21-cded93f669e5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.835094] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af78a3cd-f975-4e50-966f-446e7d83e851 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.848662] env[62508]: DEBUG nova.compute.provider_tree [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1870.874692] env[62508]: DEBUG oslo_concurrency.lockutils [req-27270540-88bd-448f-ad00-4b5b91f75e76 req-a8e78f18-836f-4202-b0ba-b5d7463bd42e service nova] Releasing lock "refresh_cache-a89db7c6-b0d9-44c0-8015-8a96f09200f6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.984499] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d61dc7-8746-9e4b-b80c-a75c10e5e825, 'name': SearchDatastore_Task, 'duration_secs': 0.00885} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.985321] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c4531e4-c80b-46c3-bea8-6420894062ed {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.990990] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1870.990990] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5202ab2f-719e-75ae-9195-4e27a367b880" [ 1870.990990] env[62508]: _type = "Task" [ 1870.990990] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.999402] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5202ab2f-719e-75ae-9195-4e27a367b880, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.017231] env[62508]: DEBUG oslo_concurrency.lockutils [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "d9341873-6ce8-4410-ae11-768c05c59f64" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.017536] env[62508]: DEBUG oslo_concurrency.lockutils [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "d9341873-6ce8-4410-ae11-768c05c59f64" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.017759] env[62508]: DEBUG oslo_concurrency.lockutils [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "d9341873-6ce8-4410-ae11-768c05c59f64-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.018130] env[62508]: DEBUG oslo_concurrency.lockutils [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "d9341873-6ce8-4410-ae11-768c05c59f64-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.018130] env[62508]: DEBUG oslo_concurrency.lockutils [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "d9341873-6ce8-4410-ae11-768c05c59f64-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.023698] env[62508]: INFO nova.compute.manager [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Terminating instance [ 1871.027818] env[62508]: DEBUG nova.compute.manager [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1871.028041] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1871.028890] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586685c9-aeb3-484e-9129-18ffc93a039a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.036104] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1871.036394] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9769706-9c65-4452-9262-88a7a278983c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.189765] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "b74d8374-d5ae-456b-9e9e-ec09459a737b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.190029] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "b74d8374-d5ae-456b-9e9e-ec09459a737b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.190261] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "b74d8374-d5ae-456b-9e9e-ec09459a737b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.190455] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "b74d8374-d5ae-456b-9e9e-ec09459a737b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.190639] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "b74d8374-d5ae-456b-9e9e-ec09459a737b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.192735] env[62508]: INFO nova.compute.manager [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Terminating instance [ 1871.194523] env[62508]: DEBUG nova.compute.manager [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1871.194713] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1871.195583] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48304e41-3d15-4498-b611-7170a9d0d8f4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.203391] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1871.203637] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9c3036a0-7702-4e3c-bb1e-12dc61c56906 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.209385] env[62508]: DEBUG oslo_vmware.api [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1871.209385] env[62508]: value = "task-1776770" [ 1871.209385] env[62508]: _type = "Task" [ 1871.209385] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.213775] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1871.213975] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1871.214172] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Deleting the datastore file [datastore1] d9341873-6ce8-4410-ae11-768c05c59f64 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1871.214766] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c561c3e0-fe93-415f-b12c-564302c95f96 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.221566] env[62508]: DEBUG oslo_vmware.api [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776770, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.225413] env[62508]: DEBUG oslo_vmware.api [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1871.225413] env[62508]: value = "task-1776771" [ 1871.225413] env[62508]: _type = "Task" [ 1871.225413] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.233179] env[62508]: DEBUG oslo_vmware.api [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776771, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.371894] env[62508]: ERROR nova.scheduler.client.report [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [req-2b106b69-bf5d-461e-b713-bc271ad3f5cd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2b106b69-bf5d-461e-b713-bc271ad3f5cd"}]} [ 1871.390701] env[62508]: DEBUG nova.scheduler.client.report [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1871.404745] env[62508]: DEBUG nova.scheduler.client.report [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1871.405019] env[62508]: DEBUG nova.compute.provider_tree [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1871.417675] env[62508]: DEBUG nova.scheduler.client.report [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1871.437548] env[62508]: DEBUG nova.scheduler.client.report [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1871.501015] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5202ab2f-719e-75ae-9195-4e27a367b880, 'name': SearchDatastore_Task, 'duration_secs': 0.009539} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.503511] env[62508]: DEBUG oslo_concurrency.lockutils [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1871.503777] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a89db7c6-b0d9-44c0-8015-8a96f09200f6/a89db7c6-b0d9-44c0-8015-8a96f09200f6.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1871.504239] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a1a46cfd-7f45-42d8-a6fc-df56f5833a11 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.512411] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1871.512411] env[62508]: value = "task-1776772" [ 1871.512411] env[62508]: _type = "Task" [ 1871.512411] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.522166] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776772, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.585772] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c1384e-efcc-41a3-930f-3be9a323b631 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.593553] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04af4e02-8ac3-4b71-81f9-d4700b521e3c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.624632] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce7e4293-345c-493a-bd81-92c08601433e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.632337] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923b0c67-1870-4e11-a79b-b37fad69819b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.646342] env[62508]: DEBUG nova.compute.provider_tree [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1871.721387] env[62508]: DEBUG oslo_vmware.api [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776770, 'name': PowerOffVM_Task, 'duration_secs': 0.222822} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.721816] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1871.721903] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1871.722728] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32d177dc-e692-4e2e-94b1-398b9b438dd1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.734199] env[62508]: DEBUG oslo_vmware.api [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776771, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.218365} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.734451] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1871.734636] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1871.734817] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1871.734990] env[62508]: INFO nova.compute.manager [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Took 0.71 seconds to destroy the instance on the hypervisor. [ 1871.735262] env[62508]: DEBUG oslo.service.loopingcall [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1871.735450] env[62508]: DEBUG nova.compute.manager [-] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1871.735547] env[62508]: DEBUG nova.network.neutron [-] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1871.934092] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1871.934092] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1871.934092] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Deleting the datastore file [datastore1] b74d8374-d5ae-456b-9e9e-ec09459a737b {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1871.934695] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d10a37a-8649-44d8-937c-856ea0ef9725 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.941198] env[62508]: DEBUG oslo_vmware.api [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1871.941198] env[62508]: value = "task-1776774" [ 1871.941198] env[62508]: _type = "Task" [ 1871.941198] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.950096] env[62508]: DEBUG oslo_vmware.api [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776774, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.015066] env[62508]: DEBUG nova.compute.manager [req-05e46645-f399-4b56-9d5e-036031f5d384 req-063e1b18-0856-46bf-bc60-c63e0e54f678 service nova] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Received event network-vif-deleted-13de194a-6e4e-4af3-bdef-70dcae738549 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1872.015066] env[62508]: INFO nova.compute.manager [req-05e46645-f399-4b56-9d5e-036031f5d384 req-063e1b18-0856-46bf-bc60-c63e0e54f678 service nova] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Neutron deleted interface 13de194a-6e4e-4af3-bdef-70dcae738549; detaching it from the instance and deleting it from the info cache [ 1872.015308] env[62508]: DEBUG nova.network.neutron [req-05e46645-f399-4b56-9d5e-036031f5d384 req-063e1b18-0856-46bf-bc60-c63e0e54f678 service nova] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1872.026712] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776772, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.176273] env[62508]: DEBUG nova.scheduler.client.report [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 175 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1872.176571] env[62508]: DEBUG nova.compute.provider_tree [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 175 to 176 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1872.176805] env[62508]: DEBUG nova.compute.provider_tree [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1872.188352] env[62508]: DEBUG oslo_concurrency.lockutils [None 
req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "544d165c-5054-4c57-a5d9-ac69046c6fbc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1872.188583] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "544d165c-5054-4c57-a5d9-ac69046c6fbc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1872.188854] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "544d165c-5054-4c57-a5d9-ac69046c6fbc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1872.189078] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "544d165c-5054-4c57-a5d9-ac69046c6fbc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1872.189262] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "544d165c-5054-4c57-a5d9-ac69046c6fbc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1872.191541] env[62508]: INFO nova.compute.manager [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Terminating instance [ 1872.193836] env[62508]: DEBUG nova.compute.manager [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1872.193995] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1872.194842] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f289e7ea-361b-463a-88b2-918a0f778f7f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.202935] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1872.203187] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17d1b7b1-bf94-4c78-8c10-55096d2c6e47 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.211772] env[62508]: DEBUG oslo_vmware.api [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1872.211772] env[62508]: value = "task-1776776" [ 1872.211772] env[62508]: _type = "Task" [ 1872.211772] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.222737] env[62508]: DEBUG oslo_vmware.api [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776776, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.451311] env[62508]: DEBUG oslo_vmware.api [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776774, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.491448] env[62508]: DEBUG nova.network.neutron [-] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1872.522443] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3ed3364d-ff39-457a-ac1c-68d689591253 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.529942] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776772, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.843844} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.531099] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a89db7c6-b0d9-44c0-8015-8a96f09200f6/a89db7c6-b0d9-44c0-8015-8a96f09200f6.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1872.531333] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1872.531713] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d27145df-d132-427c-b0e5-3c303fca7866 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.536427] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f96251-a202-48ae-ad15-90c5d47aa62b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.552639] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1872.552639] env[62508]: value = "task-1776777" [ 1872.552639] env[62508]: _type = "Task" [ 1872.552639] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.562920] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776777, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.572626] env[62508]: DEBUG nova.compute.manager [req-05e46645-f399-4b56-9d5e-036031f5d384 req-063e1b18-0856-46bf-bc60-c63e0e54f678 service nova] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Detach interface failed, port_id=13de194a-6e4e-4af3-bdef-70dcae738549, reason: Instance d9341873-6ce8-4410-ae11-768c05c59f64 could not be found. 
{{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1872.681992] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.539s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1872.706733] env[62508]: INFO nova.scheduler.client.report [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Deleted allocations for instance aa7c5176-4420-44b1-9fea-6db7561492c7 [ 1872.721228] env[62508]: DEBUG oslo_vmware.api [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776776, 'name': PowerOffVM_Task, 'duration_secs': 0.232585} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.721495] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1872.721687] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1872.721966] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-348b1e50-56b2-4b59-bcc6-953319fc6731 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.846731] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1872.846916] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1872.847124] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Deleting the datastore file [datastore1] 544d165c-5054-4c57-a5d9-ac69046c6fbc {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1872.847579] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44db0ec2-a38b-4698-97da-051a1be74991 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.854045] env[62508]: DEBUG oslo_vmware.api [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1872.854045] env[62508]: value = "task-1776779" [ 1872.854045] env[62508]: _type = "Task" [ 1872.854045] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.863068] env[62508]: DEBUG oslo_vmware.api [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776779, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.951156] env[62508]: DEBUG oslo_vmware.api [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776774, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.632602} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.951515] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1872.951673] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1872.951851] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1872.952197] env[62508]: INFO nova.compute.manager [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Took 1.76 seconds to destroy the instance on the hypervisor. [ 1872.952332] env[62508]: DEBUG oslo.service.loopingcall [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1872.952500] env[62508]: DEBUG nova.compute.manager [-] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1872.952611] env[62508]: DEBUG nova.network.neutron [-] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1872.995033] env[62508]: INFO nova.compute.manager [-] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Took 1.26 seconds to deallocate network for instance. [ 1873.062441] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776777, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.284195} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.062749] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1873.063571] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f20d15-67e3-418e-b5ca-ab4abef19a39 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.086772] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] a89db7c6-b0d9-44c0-8015-8a96f09200f6/a89db7c6-b0d9-44c0-8015-8a96f09200f6.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1873.087060] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12cae67d-6ee0-456b-ac64-f4b148f8f2d1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.109432] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1873.109432] env[62508]: value = "task-1776780" [ 1873.109432] env[62508]: _type = "Task" [ 1873.109432] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.121370] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776780, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.215479] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46734f14-96d7-4983-bffc-6f009867b4c2 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "aa7c5176-4420-44b1-9fea-6db7561492c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.546s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.346258] env[62508]: DEBUG oslo_concurrency.lockutils [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.346568] env[62508]: DEBUG oslo_concurrency.lockutils [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.346807] env[62508]: DEBUG oslo_concurrency.lockutils [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.347083] env[62508]: DEBUG oslo_concurrency.lockutils [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.347298] env[62508]: DEBUG oslo_concurrency.lockutils [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.349903] env[62508]: INFO nova.compute.manager [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Terminating instance [ 1873.351450] env[62508]: DEBUG nova.compute.manager [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1873.351720] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1873.353059] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-727c3b45-3aeb-4267-ad0f-91dab9e793a9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.364509] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1873.368114] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62129bfc-869c-41d2-b7bc-93f09c0b32a2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.369669] env[62508]: DEBUG oslo_vmware.api [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776779, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.248966} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.369942] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1873.370180] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1873.370393] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1873.370602] env[62508]: INFO nova.compute.manager [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1873.370874] env[62508]: DEBUG oslo.service.loopingcall [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1873.371426] env[62508]: DEBUG nova.compute.manager [-] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1873.371544] env[62508]: DEBUG nova.network.neutron [-] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1873.375031] env[62508]: DEBUG oslo_vmware.api [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1873.375031] env[62508]: value = "task-1776781" [ 1873.375031] env[62508]: _type = "Task" [ 1873.375031] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.382948] env[62508]: DEBUG oslo_vmware.api [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776781, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.501483] env[62508]: DEBUG oslo_concurrency.lockutils [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.501483] env[62508]: DEBUG oslo_concurrency.lockutils [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.501803] env[62508]: DEBUG nova.objects.instance [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lazy-loading 'resources' on Instance uuid d9341873-6ce8-4410-ae11-768c05c59f64 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1873.620555] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776780, 'name': ReconfigVM_Task, 'duration_secs': 0.339571} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.620811] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Reconfigured VM instance instance-0000006e to attach disk [datastore1] a89db7c6-b0d9-44c0-8015-8a96f09200f6/a89db7c6-b0d9-44c0-8015-8a96f09200f6.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1873.621460] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f394d72-4278-4ecb-8e36-acb9ee9a93ba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.627913] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1873.627913] env[62508]: value = "task-1776782" [ 1873.627913] env[62508]: _type = "Task" [ 1873.627913] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.636838] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776782, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.665945] env[62508]: DEBUG nova.network.neutron [-] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1873.885644] env[62508]: DEBUG oslo_vmware.api [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776781, 'name': PowerOffVM_Task, 'duration_secs': 0.155536} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.885845] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1873.886027] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1873.886671] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c68860c-fe51-4716-8638-b367e1739e9a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.043361] env[62508]: DEBUG nova.compute.manager [req-7aea5da0-c5ab-4486-aa6c-332964425e9a req-5affa7c2-48cc-4577-9608-2773c262294e service nova] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Received event network-vif-deleted-c262a662-8a1b-45b0-b251-df840455ae82 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1874.043612] env[62508]: DEBUG nova.compute.manager [req-7aea5da0-c5ab-4486-aa6c-332964425e9a req-5affa7c2-48cc-4577-9608-2773c262294e service nova] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Received event network-vif-deleted-e0bb9906-0d2a-4bdb-bbe4-5a3074c66499 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1874.043760] env[62508]: INFO nova.compute.manager [req-7aea5da0-c5ab-4486-aa6c-332964425e9a req-5affa7c2-48cc-4577-9608-2773c262294e service nova] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Neutron deleted interface e0bb9906-0d2a-4bdb-bbe4-5a3074c66499; detaching it from the instance and deleting it from the info cache [ 1874.043952] env[62508]: DEBUG nova.network.neutron [req-7aea5da0-c5ab-4486-aa6c-332964425e9a req-5affa7c2-48cc-4577-9608-2773c262294e service nova] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1874.137359] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776782, 'name': Rename_Task, 'duration_secs': 0.143719} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.137698] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1874.140025] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-66ed18e8-8af0-4827-b876-98d571a4db78 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.147008] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1874.147008] env[62508]: value = "task-1776784" [ 1874.147008] env[62508]: _type = "Task" [ 1874.147008] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.151385] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a349b8d4-454d-4be2-8a3e-901b01f09f26 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.158484] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776784, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.162143] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-185f9dca-a072-454c-ae02-7f562364dbdb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.169168] env[62508]: INFO nova.compute.manager [-] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Took 1.22 seconds to deallocate network for instance. 
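The entries above repeat the same vSphere call pattern several times: the service layer logs "Invoking <Object>.<Method> with opID=...", then the API layer polls the returned task ("Task: {'id': task-..., ...} progress is N%") until it reports "completed successfully". Below is a minimal sketch of that invoke-and-poll pattern with oslo.vmware; the vCenter endpoint, credentials and the VM managed-object reference are placeholders, not values taken from this log.

# Sketch only: assumes a reachable vCenter; all identifiers below are illustrative.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.org', 'user', 'secret',           # placeholder endpoint/credentials
    api_retry_count=10, task_poll_interval=0.5)   # poll cadence behind "progress is N%"

# A real caller resolves the VM through the property collector; 'vm-123' is made up.
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

# invoke_api() issues the SOAP request (the "Invoking VirtualMachine.PowerOffVM_Task"
# lines); wait_for_task() polls TaskInfo until it reaches success or raises on error.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)
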
[ 1874.198906] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ff52e3-c667-4fb2-a7ed-705c2e71d09f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.207689] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6925115c-2bbf-49e2-9124-8af2e64d5cf7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.223083] env[62508]: DEBUG nova.compute.provider_tree [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1874.276339] env[62508]: DEBUG nova.network.neutron [-] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1874.335475] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Volume attach. 
Driver type: vmdk {{(pid=62508) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1874.335728] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368832', 'volume_id': '464c4a86-7294-4ef5-8433-0176e235cc8b', 'name': 'volume-464c4a86-7294-4ef5-8433-0176e235cc8b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3829b04-6d1f-44f0-8b94-30b582506ed4', 'attached_at': '', 'detached_at': '', 'volume_id': '464c4a86-7294-4ef5-8433-0176e235cc8b', 'serial': '464c4a86-7294-4ef5-8433-0176e235cc8b'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1874.336730] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f3eb3ed-3701-4ab0-b3cd-3d0aea25d2d3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.355052] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27881d3d-d751-4fb2-8bb9-4b171d471711 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.381083] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] volume-464c4a86-7294-4ef5-8433-0176e235cc8b/volume-464c4a86-7294-4ef5-8433-0176e235cc8b.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1874.381336] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4cfc9ae3-36bd-4ae9-aba6-cce7304ade0b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.400401] env[62508]: DEBUG oslo_vmware.api [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1874.400401] env[62508]: value = "task-1776785" [ 1874.400401] env[62508]: _type = "Task" [ 1874.400401] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.409555] env[62508]: DEBUG oslo_vmware.api [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776785, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.546436] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3686bc2d-8621-4c0d-a70d-1e0ecf47112b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.555790] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f45540-1b77-4a85-8286-7c1de497cf52 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.587511] env[62508]: DEBUG nova.compute.manager [req-7aea5da0-c5ab-4486-aa6c-332964425e9a req-5affa7c2-48cc-4577-9608-2773c262294e service nova] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Detach interface failed, port_id=e0bb9906-0d2a-4bdb-bbe4-5a3074c66499, reason: Instance 544d165c-5054-4c57-a5d9-ac69046c6fbc could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1874.657597] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776784, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.703068] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1874.756691] env[62508]: DEBUG nova.scheduler.client.report [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 176 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1874.756959] env[62508]: DEBUG nova.compute.provider_tree [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 176 to 177 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1874.757182] env[62508]: DEBUG nova.compute.provider_tree [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 
1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1874.779041] env[62508]: INFO nova.compute.manager [-] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Took 1.41 seconds to deallocate network for instance. [ 1874.911295] env[62508]: DEBUG oslo_vmware.api [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776785, 'name': ReconfigVM_Task, 'duration_secs': 0.370974} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.911634] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Reconfigured VM instance instance-00000068 to attach disk [datastore1] volume-464c4a86-7294-4ef5-8433-0176e235cc8b/volume-464c4a86-7294-4ef5-8433-0176e235cc8b.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1874.916476] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61b17786-f3c7-4cfd-89a2-935f40c3e474 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.931843] env[62508]: DEBUG oslo_vmware.api [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1874.931843] env[62508]: value = "task-1776786" [ 1874.931843] env[62508]: _type = "Task" [ 1874.931843] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.939707] env[62508]: DEBUG oslo_vmware.api [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776786, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.157153] env[62508]: DEBUG oslo_vmware.api [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776784, 'name': PowerOnVM_Task, 'duration_secs': 0.553862} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.157450] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1875.157623] env[62508]: INFO nova.compute.manager [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Took 8.05 seconds to spawn the instance on the hypervisor. 
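The set_inventory_for_provider entries above show the resource tracker pushing a full inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 and bumping its generation (176 to 177). As a rough illustration of what that update looks like on the wire, here is a hedged sketch of the equivalent Placement REST call; the endpoint and token are placeholders, and Nova's report client actually goes through keystoneauth rather than raw requests.

# Sketch only: endpoint, token and microversion handling are simplified.
import requests

PLACEMENT = 'http://placement.example.org/placement'   # assumed endpoint
PROVIDER = '5d5b4923-a8ac-4688-9f86-2405bd3406a9'      # provider uuid from the log

payload = {
    'resource_provider_generation': 176,   # generation the client read before updating
    'inventories': {
        'VCPU':      {'total': 48, 'reserved': 0, 'min_unit': 1,
                      'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400, 'reserved': 0, 'min_unit': 1,
                      'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0},
    },
}

resp = requests.put(
    f'{PLACEMENT}/resource_providers/{PROVIDER}/inventories',
    json=payload,
    headers={'X-Auth-Token': '<token>',                 # keystone token (placeholder)
             'OpenStack-API-Version': 'placement 1.26'})
resp.raise_for_status()   # a 409 here means the provider generation went stale
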
[ 1875.157804] env[62508]: DEBUG nova.compute.manager [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1875.158583] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c86e470-a9ed-41fc-a890-30589ff1bb1a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.262103] env[62508]: DEBUG oslo_concurrency.lockutils [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.761s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.264755] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.562s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1875.264990] env[62508]: DEBUG nova.objects.instance [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lazy-loading 'resources' on Instance uuid b74d8374-d5ae-456b-9e9e-ec09459a737b {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1875.283202] env[62508]: INFO nova.scheduler.client.report [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Deleted allocations for instance d9341873-6ce8-4410-ae11-768c05c59f64 [ 1875.284859] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.441472] env[62508]: DEBUG oslo_vmware.api [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776786, 'name': ReconfigVM_Task, 'duration_secs': 0.130305} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.441802] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368832', 'volume_id': '464c4a86-7294-4ef5-8433-0176e235cc8b', 'name': 'volume-464c4a86-7294-4ef5-8433-0176e235cc8b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3829b04-6d1f-44f0-8b94-30b582506ed4', 'attached_at': '', 'detached_at': '', 'volume_id': '464c4a86-7294-4ef5-8433-0176e235cc8b', 'serial': '464c4a86-7294-4ef5-8433-0176e235cc8b'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1875.676050] env[62508]: INFO nova.compute.manager [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Took 14.47 seconds to build instance. [ 1875.790895] env[62508]: DEBUG oslo_concurrency.lockutils [None req-90d04056-3e20-4688-ac70-b6279997fa08 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "d9341873-6ce8-4410-ae11-768c05c59f64" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.773s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.869132] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c677e1-a52b-4bad-80e6-cafa787a52a7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.878279] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea60956d-6b9f-4852-9e2a-0bc32c27673d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.907748] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93dba6f3-e5a4-4c48-9385-d1b1bce0d0c4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.914450] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8adac03-aabe-4fe2-bf89-e14ad8583022 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.926986] env[62508]: DEBUG nova.compute.provider_tree [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1876.181695] env[62508]: DEBUG oslo_concurrency.lockutils [None req-280529fe-3ead-4c34-8f2f-274613ab774a tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "a89db7c6-b0d9-44c0-8015-8a96f09200f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.003s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.429962] env[62508]: DEBUG nova.scheduler.client.report [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1876.478158] env[62508]: DEBUG nova.objects.instance [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lazy-loading 'flavor' on Instance uuid d3829b04-6d1f-44f0-8b94-30b582506ed4 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1876.935646] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.671s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.939025] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.654s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.939025] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.954476] env[62508]: INFO nova.scheduler.client.report [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Deleted allocations for instance b74d8374-d5ae-456b-9e9e-ec09459a737b [ 1876.955631] env[62508]: DEBUG oslo_concurrency.lockutils [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "a89db7c6-b0d9-44c0-8015-8a96f09200f6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.955856] env[62508]: DEBUG oslo_concurrency.lockutils [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "a89db7c6-b0d9-44c0-8015-8a96f09200f6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.956075] env[62508]: DEBUG oslo_concurrency.lockutils [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "a89db7c6-b0d9-44c0-8015-8a96f09200f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.956263] env[62508]: DEBUG oslo_concurrency.lockutils [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "a89db7c6-b0d9-44c0-8015-8a96f09200f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.956432] env[62508]: DEBUG oslo_concurrency.lockutils [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "a89db7c6-b0d9-44c0-8015-8a96f09200f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.958532] env[62508]: INFO nova.scheduler.client.report [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Deleted allocations for instance 544d165c-5054-4c57-a5d9-ac69046c6fbc [ 1876.961456] env[62508]: INFO nova.compute.manager [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Terminating instance [ 1876.964688] env[62508]: DEBUG nova.compute.manager [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1876.964879] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1876.965886] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc47b8c-66fb-4af3-9a7e-635bdb376f52 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.975056] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1876.975298] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9901cf5f-6cca-4c02-95cb-aa7da1e628ad {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.983086] env[62508]: DEBUG oslo_vmware.api [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1876.983086] env[62508]: value = "task-1776787" [ 1876.983086] env[62508]: _type = "Task" [ 1876.983086] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1876.983488] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9069fd0c-3e6b-4b83-af8d-4d7341457c97 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.247s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.993302] env[62508]: DEBUG oslo_vmware.api [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776787, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.074465] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "48d8f1ee-4d35-4a64-a72a-e4a505675c8f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.074731] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "48d8f1ee-4d35-4a64-a72a-e4a505675c8f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1877.468633] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e1422268-87a6-4f1f-92d0-5f2efb9ed949 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "b74d8374-d5ae-456b-9e9e-ec09459a737b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.278s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.473205] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eb559b78-ce0b-47e5-a368-60979c9a7c39 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "544d165c-5054-4c57-a5d9-ac69046c6fbc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.285s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.496205] env[62508]: DEBUG oslo_vmware.api [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776787, 'name': PowerOffVM_Task, 'duration_secs': 0.189967} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.496539] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1877.496653] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1877.496890] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e15194bc-a4a5-4f17-a4ee-6f0da6afdfdd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.577439] env[62508]: DEBUG nova.compute.manager [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1877.836378] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.836690] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.097587] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1878.097898] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.099405] env[62508]: INFO nova.compute.claims [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1878.217616] env[62508]: DEBUG oslo_concurrency.lockutils [None 
req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "716fc0ee-9aa7-4d2f-a5e0-024484bbe014" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1878.217851] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "716fc0ee-9aa7-4d2f-a5e0-024484bbe014" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.340041] env[62508]: DEBUG nova.compute.utils [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1878.720325] env[62508]: DEBUG nova.compute.manager [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1878.843017] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1879.240229] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1879.242446] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869ff014-28d5-4b84-9ec0-5adda477a6f4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.252740] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6fcfb2a-4798-41fd-82d2-263ed228d389 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.298117] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b11f09a9-3990-4ad8-96a1-8f5c8187644c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.307601] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0696bce-d547-45e7-9cf0-e29d00d0debf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.326177] env[62508]: DEBUG 
nova.compute.provider_tree [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1879.830273] env[62508]: DEBUG nova.scheduler.client.report [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1879.901287] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1879.901551] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1879.901858] env[62508]: INFO nova.compute.manager [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Attaching volume e98c1101-0885-4ec9-b087-9a571840c785 to /dev/sdc [ 1879.923039] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1879.923345] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1879.923547] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Deleting the datastore file [datastore1] 0a4958d5-b9a9-4854-90ca-f19eb34cb15b {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1879.923821] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-2b2d06fb-dd1e-40fe-8964-9ce3d973e4fa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.933210] env[62508]: DEBUG oslo_vmware.api [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for the task: (returnval){ [ 1879.933210] env[62508]: value = "task-1776790" [ 1879.933210] env[62508]: _type = "Task" [ 1879.933210] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.937660] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1879.937846] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1879.938040] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleting the datastore file [datastore1] a89db7c6-b0d9-44c0-8015-8a96f09200f6 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1879.938586] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2765cb92-fdca-4ffc-b692-73b19093b400 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.943016] env[62508]: DEBUG oslo_vmware.api [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776790, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.946859] env[62508]: DEBUG oslo_vmware.api [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1879.946859] env[62508]: value = "task-1776791" [ 1879.946859] env[62508]: _type = "Task" [ 1879.946859] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.954401] env[62508]: DEBUG oslo_vmware.api [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776791, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.013440] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d0369b-654a-493b-bb70-811c0e813402 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.020719] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b4591e8-b12e-4a65-851b-8dfd0d9c5622 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.034409] env[62508]: DEBUG nova.virt.block_device [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Updating existing volume attachment record: f48c8258-0f84-4f63-824c-eb8ae0bd5f0a {{(pid=62508) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1880.335577] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.237s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1880.336154] env[62508]: DEBUG nova.compute.manager [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1880.338875] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.099s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1880.340605] env[62508]: INFO nova.compute.claims [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1880.443555] env[62508]: DEBUG oslo_vmware.api [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Task: {'id': task-1776790, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142662} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.443797] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1880.443985] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1880.444182] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1880.444367] env[62508]: INFO nova.compute.manager [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Took 7.09 seconds to destroy the instance on the hypervisor. [ 1880.444609] env[62508]: DEBUG oslo.service.loopingcall [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1880.444797] env[62508]: DEBUG nova.compute.manager [-] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1880.444894] env[62508]: DEBUG nova.network.neutron [-] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1880.458356] env[62508]: DEBUG oslo_vmware.api [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776791, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128729} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.458785] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1880.459062] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1880.459333] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1880.459583] env[62508]: INFO nova.compute.manager [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Took 3.49 seconds to destroy the instance on the hypervisor. [ 1880.459916] env[62508]: DEBUG oslo.service.loopingcall [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1880.460187] env[62508]: DEBUG nova.compute.manager [-] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1880.460302] env[62508]: DEBUG nova.network.neutron [-] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1880.767840] env[62508]: DEBUG nova.compute.manager [req-30a859dd-e0e6-4e44-bbfa-717e744ae35a req-2ce9e7ed-f7dc-4210-a0e6-708eeae0c6d5 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Received event network-vif-deleted-ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1880.768280] env[62508]: INFO nova.compute.manager [req-30a859dd-e0e6-4e44-bbfa-717e744ae35a req-2ce9e7ed-f7dc-4210-a0e6-708eeae0c6d5 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Neutron deleted interface ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4; detaching it from the instance and deleting it from the info cache [ 1880.768389] env[62508]: DEBUG nova.network.neutron [req-30a859dd-e0e6-4e44-bbfa-717e744ae35a req-2ce9e7ed-f7dc-4210-a0e6-708eeae0c6d5 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1880.838753] env[62508]: DEBUG nova.compute.manager [req-c0701c02-660d-4feb-bebe-e91d154740c4 req-ffa28bc2-e123-4529-8301-e0373820d60e service nova] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Received event 
network-vif-deleted-420ade26-2a3d-41e8-8bcd-3dd540bb74d6 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1880.839165] env[62508]: INFO nova.compute.manager [req-c0701c02-660d-4feb-bebe-e91d154740c4 req-ffa28bc2-e123-4529-8301-e0373820d60e service nova] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Neutron deleted interface 420ade26-2a3d-41e8-8bcd-3dd540bb74d6; detaching it from the instance and deleting it from the info cache [ 1880.839165] env[62508]: DEBUG nova.network.neutron [req-c0701c02-660d-4feb-bebe-e91d154740c4 req-ffa28bc2-e123-4529-8301-e0373820d60e service nova] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1880.845220] env[62508]: DEBUG nova.compute.utils [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1880.848701] env[62508]: DEBUG nova.compute.manager [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1880.848851] env[62508]: DEBUG nova.network.neutron [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1880.886874] env[62508]: DEBUG nova.policy [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '466fd3a805b24749b134fe7977a5ac86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e90ec7156574be597a12f4fa0e8c1dc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1881.063394] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "de69dbf0-86f1-4b05-a9db-8b9afaabe49c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1881.063394] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "de69dbf0-86f1-4b05-a9db-8b9afaabe49c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1881.063394] env[62508]: DEBUG 
oslo_concurrency.lockutils [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "de69dbf0-86f1-4b05-a9db-8b9afaabe49c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1881.063394] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "de69dbf0-86f1-4b05-a9db-8b9afaabe49c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1881.063394] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "de69dbf0-86f1-4b05-a9db-8b9afaabe49c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1881.065196] env[62508]: INFO nova.compute.manager [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Terminating instance [ 1881.066848] env[62508]: DEBUG nova.compute.manager [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1881.067060] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1881.067912] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d66c4ca-27ca-460c-811d-babdec143268 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.076105] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1881.076330] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a432e752-20c2-4033-a15f-48bc6d24893e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.084012] env[62508]: DEBUG oslo_vmware.api [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1881.084012] env[62508]: value = "task-1776793" [ 1881.084012] env[62508]: _type = "Task" [ 1881.084012] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.091516] env[62508]: DEBUG oslo_vmware.api [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776793, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.146352] env[62508]: DEBUG nova.network.neutron [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Successfully created port: 4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1881.207195] env[62508]: DEBUG nova.network.neutron [-] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1881.240769] env[62508]: DEBUG nova.network.neutron [-] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1881.270665] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c8c94ff6-cd63-4499-91ad-486b8b206a2c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.279849] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ecc295-68d7-47e1-bf8d-bd947f7a0b18 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.309407] env[62508]: DEBUG nova.compute.manager [req-30a859dd-e0e6-4e44-bbfa-717e744ae35a req-2ce9e7ed-f7dc-4210-a0e6-708eeae0c6d5 service nova] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Detach interface failed, port_id=ea63968d-e30e-4d7f-a63a-1a1b83e0f9f4, reason: Instance 0a4958d5-b9a9-4854-90ca-f19eb34cb15b could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1881.341908] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2bf7e55a-9691-417c-b096-455ad4b662f5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.351025] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89227b63-0c0e-4a8e-9c98-346c46ef153f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.362851] env[62508]: DEBUG nova.compute.manager [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1881.386210] env[62508]: DEBUG nova.compute.manager [req-c0701c02-660d-4feb-bebe-e91d154740c4 req-ffa28bc2-e123-4529-8301-e0373820d60e service nova] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Detach interface failed, port_id=420ade26-2a3d-41e8-8bcd-3dd540bb74d6, reason: Instance a89db7c6-b0d9-44c0-8015-8a96f09200f6 could not be found. 
{{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1881.487568] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9397aae9-323e-4c7e-bd30-941d8443a863 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.495880] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726af9aa-00a8-4b42-ad1e-2c96bc5d1b30 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.527116] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa85b0c-f75c-4cc7-b3bb-06c135d3a208 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.534116] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c722c66e-d329-48e9-94b9-560b15eb4de6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.547345] env[62508]: DEBUG nova.compute.provider_tree [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1881.593999] env[62508]: DEBUG oslo_vmware.api [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776793, 'name': PowerOffVM_Task, 'duration_secs': 0.273427} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.594167] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1881.594299] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1881.594582] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f35bad93-222c-497a-9f5d-58baddde3f92 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.709292] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1881.709540] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1881.709724] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Deleting the datastore file [datastore1] de69dbf0-86f1-4b05-a9db-8b9afaabe49c {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1881.710212] env[62508]: INFO nova.compute.manager [-] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Took 1.27 seconds to deallocate network for instance. [ 1881.710441] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f44357da-8a9e-432e-b03a-d72fa889e4c0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.721967] env[62508]: DEBUG oslo_vmware.api [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for the task: (returnval){ [ 1881.721967] env[62508]: value = "task-1776795" [ 1881.721967] env[62508]: _type = "Task" [ 1881.721967] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.730426] env[62508]: DEBUG oslo_vmware.api [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776795, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.743165] env[62508]: INFO nova.compute.manager [-] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Took 1.28 seconds to deallocate network for instance. [ 1882.050565] env[62508]: DEBUG nova.scheduler.client.report [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1882.219286] env[62508]: DEBUG oslo_concurrency.lockutils [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1882.231955] env[62508]: DEBUG oslo_vmware.api [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Task: {'id': task-1776795, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144692} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.232251] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1882.232439] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1882.232638] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1882.232829] env[62508]: INFO nova.compute.manager [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1882.233170] env[62508]: DEBUG oslo.service.loopingcall [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1882.233380] env[62508]: DEBUG nova.compute.manager [-] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1882.233476] env[62508]: DEBUG nova.network.neutron [-] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1882.249864] env[62508]: DEBUG oslo_concurrency.lockutils [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1882.374904] env[62508]: DEBUG nova.compute.manager [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1882.402692] env[62508]: DEBUG nova.virt.hardware [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1882.402968] env[62508]: DEBUG nova.virt.hardware [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1882.403186] env[62508]: DEBUG nova.virt.hardware [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1882.403404] env[62508]: DEBUG nova.virt.hardware [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1882.403577] env[62508]: DEBUG nova.virt.hardware [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1882.403743] env[62508]: DEBUG nova.virt.hardware [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1882.403981] env[62508]: DEBUG nova.virt.hardware [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1882.404228] env[62508]: DEBUG nova.virt.hardware [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1882.404381] env[62508]: DEBUG nova.virt.hardware [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1882.404641] env[62508]: DEBUG nova.virt.hardware [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1882.404790] env[62508]: DEBUG nova.virt.hardware [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1882.405737] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1677f1b8-468a-4c65-8dc3-d11ad56adeca {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.415037] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e197e20a-802f-4f0d-ad75-382196b96816 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.556610] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.218s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.557241] env[62508]: DEBUG nova.compute.manager [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1882.559999] env[62508]: DEBUG oslo_concurrency.lockutils [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.341s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1882.560260] env[62508]: DEBUG nova.objects.instance [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lazy-loading 'resources' on Instance uuid 0a4958d5-b9a9-4854-90ca-f19eb34cb15b {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1882.658395] env[62508]: DEBUG nova.network.neutron [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Successfully updated port: 4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1882.797205] env[62508]: DEBUG nova.compute.manager [req-8bd22a04-69f2-4a1d-871d-11ee8dcad6fb req-fd636a9f-3f48-4d48-bd66-9c90c7351647 service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Received event network-vif-deleted-d8028a3e-f50d-41fa-b065-a2babc831eec {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1882.797433] env[62508]: INFO nova.compute.manager [req-8bd22a04-69f2-4a1d-871d-11ee8dcad6fb req-fd636a9f-3f48-4d48-bd66-9c90c7351647 service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Neutron deleted interface d8028a3e-f50d-41fa-b065-a2babc831eec; detaching it from the instance and deleting it from the info cache [ 1882.797585] env[62508]: DEBUG nova.network.neutron [req-8bd22a04-69f2-4a1d-871d-11ee8dcad6fb req-fd636a9f-3f48-4d48-bd66-9c90c7351647 service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1882.864729] env[62508]: DEBUG nova.compute.manager [req-833d7acb-86df-4230-9462-b0e1e42b2ea2 req-4b5902f0-64a0-442a-a9b5-96affaf8e9c3 service nova] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Received event network-vif-plugged-4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1882.864729] env[62508]: DEBUG oslo_concurrency.lockutils [req-833d7acb-86df-4230-9462-b0e1e42b2ea2 req-4b5902f0-64a0-442a-a9b5-96affaf8e9c3 service nova] Acquiring lock "48d8f1ee-4d35-4a64-a72a-e4a505675c8f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1882.865113] env[62508]: DEBUG oslo_concurrency.lockutils [req-833d7acb-86df-4230-9462-b0e1e42b2ea2 req-4b5902f0-64a0-442a-a9b5-96affaf8e9c3 service nova] Lock "48d8f1ee-4d35-4a64-a72a-e4a505675c8f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1882.865298] env[62508]: DEBUG oslo_concurrency.lockutils [req-833d7acb-86df-4230-9462-b0e1e42b2ea2 
req-4b5902f0-64a0-442a-a9b5-96affaf8e9c3 service nova] Lock "48d8f1ee-4d35-4a64-a72a-e4a505675c8f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.865471] env[62508]: DEBUG nova.compute.manager [req-833d7acb-86df-4230-9462-b0e1e42b2ea2 req-4b5902f0-64a0-442a-a9b5-96affaf8e9c3 service nova] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] No waiting events found dispatching network-vif-plugged-4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1882.865641] env[62508]: WARNING nova.compute.manager [req-833d7acb-86df-4230-9462-b0e1e42b2ea2 req-4b5902f0-64a0-442a-a9b5-96affaf8e9c3 service nova] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Received unexpected event network-vif-plugged-4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88 for instance with vm_state building and task_state spawning. [ 1882.865807] env[62508]: DEBUG nova.compute.manager [req-833d7acb-86df-4230-9462-b0e1e42b2ea2 req-4b5902f0-64a0-442a-a9b5-96affaf8e9c3 service nova] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Received event network-changed-4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1882.865966] env[62508]: DEBUG nova.compute.manager [req-833d7acb-86df-4230-9462-b0e1e42b2ea2 req-4b5902f0-64a0-442a-a9b5-96affaf8e9c3 service nova] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Refreshing instance network info cache due to event network-changed-4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1882.866168] env[62508]: DEBUG oslo_concurrency.lockutils [req-833d7acb-86df-4230-9462-b0e1e42b2ea2 req-4b5902f0-64a0-442a-a9b5-96affaf8e9c3 service nova] Acquiring lock "refresh_cache-48d8f1ee-4d35-4a64-a72a-e4a505675c8f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1882.866308] env[62508]: DEBUG oslo_concurrency.lockutils [req-833d7acb-86df-4230-9462-b0e1e42b2ea2 req-4b5902f0-64a0-442a-a9b5-96affaf8e9c3 service nova] Acquired lock "refresh_cache-48d8f1ee-4d35-4a64-a72a-e4a505675c8f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1882.866468] env[62508]: DEBUG nova.network.neutron [req-833d7acb-86df-4230-9462-b0e1e42b2ea2 req-4b5902f0-64a0-442a-a9b5-96affaf8e9c3 service nova] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Refreshing network info cache for port 4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1882.964274] env[62508]: DEBUG nova.network.neutron [-] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1883.063096] env[62508]: DEBUG nova.compute.utils [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1883.068148] env[62508]: DEBUG nova.compute.manager [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] 
[instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1883.068528] env[62508]: DEBUG nova.network.neutron [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1883.107624] env[62508]: DEBUG nova.policy [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2561183ef9c54615988c33906fc5f84e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce0dd059301e41abb3758625d38e435e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1883.160275] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "refresh_cache-48d8f1ee-4d35-4a64-a72a-e4a505675c8f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1883.181298] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28356af-971f-43f0-b5b8-9391899fd6e7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.189438] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6a69aa-1ba6-48b3-98ea-1e7bba551747 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.221414] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815a23ea-3071-48a6-ab41-4e42fc16a882 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.229570] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece15c8e-cf09-4e04-b2cf-ec95e490a75b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.243021] env[62508]: DEBUG nova.compute.provider_tree [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1883.300116] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e9c32afb-9ab5-4a42-a11e-12e000a633fb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.310516] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6df81c-2748-49df-87dc-fa6383c81cf6 
{{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.343630] env[62508]: DEBUG nova.compute.manager [req-8bd22a04-69f2-4a1d-871d-11ee8dcad6fb req-fd636a9f-3f48-4d48-bd66-9c90c7351647 service nova] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Detach interface failed, port_id=d8028a3e-f50d-41fa-b065-a2babc831eec, reason: Instance de69dbf0-86f1-4b05-a9db-8b9afaabe49c could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1883.375631] env[62508]: DEBUG nova.network.neutron [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Successfully created port: 0b96af68-0b68-4eac-81c7-2d671bfe54fa {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1883.403590] env[62508]: DEBUG nova.network.neutron [req-833d7acb-86df-4230-9462-b0e1e42b2ea2 req-4b5902f0-64a0-442a-a9b5-96affaf8e9c3 service nova] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1883.466509] env[62508]: INFO nova.compute.manager [-] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Took 1.23 seconds to deallocate network for instance. [ 1883.503925] env[62508]: DEBUG nova.network.neutron [req-833d7acb-86df-4230-9462-b0e1e42b2ea2 req-4b5902f0-64a0-442a-a9b5-96affaf8e9c3 service nova] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1883.569371] env[62508]: DEBUG nova.compute.manager [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1883.746429] env[62508]: DEBUG nova.scheduler.client.report [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1883.974440] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1884.007997] env[62508]: DEBUG oslo_concurrency.lockutils [req-833d7acb-86df-4230-9462-b0e1e42b2ea2 req-4b5902f0-64a0-442a-a9b5-96affaf8e9c3 service nova] Releasing lock "refresh_cache-48d8f1ee-4d35-4a64-a72a-e4a505675c8f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1884.007997] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "refresh_cache-48d8f1ee-4d35-4a64-a72a-e4a505675c8f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1884.007997] env[62508]: DEBUG nova.network.neutron [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1884.251514] env[62508]: DEBUG oslo_concurrency.lockutils [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.691s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.253918] env[62508]: DEBUG oslo_concurrency.lockutils [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.004s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1884.254188] env[62508]: DEBUG nova.objects.instance [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lazy-loading 'resources' on Instance uuid a89db7c6-b0d9-44c0-8015-8a96f09200f6 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1884.275234] env[62508]: INFO 
nova.scheduler.client.report [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Deleted allocations for instance 0a4958d5-b9a9-4854-90ca-f19eb34cb15b [ 1884.544948] env[62508]: DEBUG nova.network.neutron [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1884.579248] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Volume attach. Driver type: vmdk {{(pid=62508) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1884.579492] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368833', 'volume_id': 'e98c1101-0885-4ec9-b087-9a571840c785', 'name': 'volume-e98c1101-0885-4ec9-b087-9a571840c785', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3829b04-6d1f-44f0-8b94-30b582506ed4', 'attached_at': '', 'detached_at': '', 'volume_id': 'e98c1101-0885-4ec9-b087-9a571840c785', 'serial': 'e98c1101-0885-4ec9-b087-9a571840c785'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1884.580728] env[62508]: DEBUG nova.compute.manager [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1884.583253] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a26b3420-38aa-49f3-b673-d608103adfd9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.602473] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e700e828-6b75-49a2-8880-222c70950edd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.631426] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] volume-e98c1101-0885-4ec9-b087-9a571840c785/volume-e98c1101-0885-4ec9-b087-9a571840c785.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1884.634022] env[62508]: DEBUG nova.virt.hardware [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1884.634274] env[62508]: DEBUG nova.virt.hardware [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1884.634433] env[62508]: DEBUG nova.virt.hardware [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1884.634616] env[62508]: DEBUG nova.virt.hardware [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1884.634762] env[62508]: DEBUG nova.virt.hardware [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1884.634910] env[62508]: DEBUG nova.virt.hardware [None 
req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1884.635142] env[62508]: DEBUG nova.virt.hardware [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1884.635315] env[62508]: DEBUG nova.virt.hardware [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1884.635550] env[62508]: DEBUG nova.virt.hardware [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1884.635793] env[62508]: DEBUG nova.virt.hardware [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1884.635980] env[62508]: DEBUG nova.virt.hardware [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1884.638670] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-507ec0b0-6a73-4141-ae90-f91392c34f85 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.652190] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e62c65a9-3147-4ad0-be7e-cb5ccdfcc2ae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.660385] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809653e5-33c0-4141-8dc7-c87a744ddec7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.665147] env[62508]: DEBUG oslo_vmware.api [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1884.665147] env[62508]: value = "task-1776797" [ 1884.665147] env[62508]: _type = "Task" [ 1884.665147] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.680742] env[62508]: DEBUG oslo_vmware.api [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776797, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.733935] env[62508]: DEBUG nova.network.neutron [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Updating instance_info_cache with network_info: [{"id": "4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88", "address": "fa:16:3e:13:90:11", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ae7fae0-9c", "ovs_interfaceid": "4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1884.784521] env[62508]: DEBUG oslo_concurrency.lockutils [None req-603cbe96-cf42-47e8-9e5b-9d7232e7eef4 tempest-AttachInterfacesTestJSON-1910392284 tempest-AttachInterfacesTestJSON-1910392284-project-member] Lock "0a4958d5-b9a9-4854-90ca-f19eb34cb15b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.437s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.867116] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05de4954-1e0d-432f-a406-aa68c58030b6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.875437] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ee7c2c-ed4f-41f7-a6ab-4fe524e57663 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.909140] env[62508]: DEBUG nova.network.neutron [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Successfully updated port: 0b96af68-0b68-4eac-81c7-2d671bfe54fa {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1884.910828] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8400fbf2-681f-4fd6-bcc2-3f0d912161f4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.914252] env[62508]: DEBUG nova.compute.manager [req-73462788-ad33-426e-9271-73e1a199a3ae req-6f3d38f7-1efe-49b3-a174-1fc45ec92938 service nova] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Received event network-vif-plugged-0b96af68-0b68-4eac-81c7-2d671bfe54fa {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1884.914490] env[62508]: DEBUG oslo_concurrency.lockutils [req-73462788-ad33-426e-9271-73e1a199a3ae req-6f3d38f7-1efe-49b3-a174-1fc45ec92938 service nova] Acquiring lock "716fc0ee-9aa7-4d2f-a5e0-024484bbe014-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1884.914727] env[62508]: DEBUG oslo_concurrency.lockutils [req-73462788-ad33-426e-9271-73e1a199a3ae req-6f3d38f7-1efe-49b3-a174-1fc45ec92938 service nova] Lock "716fc0ee-9aa7-4d2f-a5e0-024484bbe014-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1884.914919] env[62508]: DEBUG oslo_concurrency.lockutils [req-73462788-ad33-426e-9271-73e1a199a3ae req-6f3d38f7-1efe-49b3-a174-1fc45ec92938 service nova] Lock "716fc0ee-9aa7-4d2f-a5e0-024484bbe014-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.915118] env[62508]: DEBUG nova.compute.manager [req-73462788-ad33-426e-9271-73e1a199a3ae req-6f3d38f7-1efe-49b3-a174-1fc45ec92938 service nova] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] No waiting events found dispatching network-vif-plugged-0b96af68-0b68-4eac-81c7-2d671bfe54fa {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1884.915351] env[62508]: WARNING nova.compute.manager [req-73462788-ad33-426e-9271-73e1a199a3ae req-6f3d38f7-1efe-49b3-a174-1fc45ec92938 service nova] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Received unexpected event network-vif-plugged-0b96af68-0b68-4eac-81c7-2d671bfe54fa for instance with vm_state building and task_state spawning. [ 1884.922033] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0e3a29-3928-486a-b2a8-c9fb092f52aa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.936424] env[62508]: DEBUG nova.compute.provider_tree [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1885.175374] env[62508]: DEBUG oslo_vmware.api [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776797, 'name': ReconfigVM_Task, 'duration_secs': 0.365698} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.176020] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Reconfigured VM instance instance-00000068 to attach disk [datastore1] volume-e98c1101-0885-4ec9-b087-9a571840c785/volume-e98c1101-0885-4ec9-b087-9a571840c785.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1885.180872] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da0f104f-4f0f-4fe4-b05a-dd474d839bd1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.196690] env[62508]: DEBUG oslo_vmware.api [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1885.196690] env[62508]: value = "task-1776798" [ 1885.196690] env[62508]: _type = "Task" [ 1885.196690] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.204920] env[62508]: DEBUG oslo_vmware.api [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776798, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.236690] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "refresh_cache-48d8f1ee-4d35-4a64-a72a-e4a505675c8f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1885.237059] env[62508]: DEBUG nova.compute.manager [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Instance network_info: |[{"id": "4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88", "address": "fa:16:3e:13:90:11", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ae7fae0-9c", "ovs_interfaceid": "4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1885.237539] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:90:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1885.245836] env[62508]: DEBUG oslo.service.loopingcall [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1885.245887] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1885.246122] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7839e76-59b9-42ae-888d-350b378ba788 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.266109] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1885.266109] env[62508]: value = "task-1776799" [ 1885.266109] env[62508]: _type = "Task" [ 1885.266109] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.274227] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776799, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.417392] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "refresh_cache-716fc0ee-9aa7-4d2f-a5e0-024484bbe014" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1885.417565] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "refresh_cache-716fc0ee-9aa7-4d2f-a5e0-024484bbe014" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1885.417729] env[62508]: DEBUG nova.network.neutron [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1885.438820] env[62508]: DEBUG nova.scheduler.client.report [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1885.707249] env[62508]: DEBUG oslo_vmware.api [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776798, 'name': ReconfigVM_Task, 'duration_secs': 0.147641} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.707249] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368833', 'volume_id': 'e98c1101-0885-4ec9-b087-9a571840c785', 'name': 'volume-e98c1101-0885-4ec9-b087-9a571840c785', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3829b04-6d1f-44f0-8b94-30b582506ed4', 'attached_at': '', 'detached_at': '', 'volume_id': 'e98c1101-0885-4ec9-b087-9a571840c785', 'serial': 'e98c1101-0885-4ec9-b087-9a571840c785'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1885.776282] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776799, 'name': CreateVM_Task, 'duration_secs': 0.461772} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.776485] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1885.783843] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1885.784029] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1885.784368] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1885.784618] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-161aeb51-7698-47c9-a1ac-93fed628c23f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.790464] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1885.790464] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52eb284e-6e1a-8a6d-dfcc-9b10b76f0930" [ 1885.790464] env[62508]: _type = "Task" [ 1885.790464] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.799691] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52eb284e-6e1a-8a6d-dfcc-9b10b76f0930, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.944647] env[62508]: DEBUG oslo_concurrency.lockutils [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.691s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.946807] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.973s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1885.947052] env[62508]: DEBUG nova.objects.instance [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lazy-loading 'resources' on Instance uuid de69dbf0-86f1-4b05-a9db-8b9afaabe49c {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1885.952682] env[62508]: DEBUG nova.network.neutron [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1885.966765] env[62508]: INFO nova.scheduler.client.report [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleted allocations for instance a89db7c6-b0d9-44c0-8015-8a96f09200f6 [ 1886.096348] env[62508]: DEBUG nova.network.neutron [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Updating instance_info_cache with network_info: [{"id": "0b96af68-0b68-4eac-81c7-2d671bfe54fa", "address": "fa:16:3e:c6:7f:9f", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b96af68-0b", "ovs_interfaceid": "0b96af68-0b68-4eac-81c7-2d671bfe54fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1886.301467] env[62508]: DEBUG 
oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52eb284e-6e1a-8a6d-dfcc-9b10b76f0930, 'name': SearchDatastore_Task, 'duration_secs': 0.011104} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.301788] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1886.302051] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1886.302308] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1886.302475] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1886.302672] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1886.302948] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-28c0ef41-343a-4299-bab3-a55423de4b80 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.312176] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1886.312424] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1886.313253] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8228a515-9b27-4279-a05e-12430ffd4e2f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.318650] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1886.318650] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52deac5f-46a9-cf7a-e9eb-2d516f4d83e0" [ 1886.318650] env[62508]: _type = "Task" [ 1886.318650] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.326578] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52deac5f-46a9-cf7a-e9eb-2d516f4d83e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.481968] env[62508]: DEBUG oslo_concurrency.lockutils [None req-acae10d8-e5b3-44a4-bbdb-26adf5dc9f7d tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "a89db7c6-b0d9-44c0-8015-8a96f09200f6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.526s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1886.584303] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95097003-03b4-432f-803d-870065979237 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.593507] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ed8c31-c83b-468c-990f-88d9e2a50ab6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.600393] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "refresh_cache-716fc0ee-9aa7-4d2f-a5e0-024484bbe014" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1886.600864] env[62508]: DEBUG nova.compute.manager [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Instance network_info: |[{"id": "0b96af68-0b68-4eac-81c7-2d671bfe54fa", "address": "fa:16:3e:c6:7f:9f", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b96af68-0b", "ovs_interfaceid": "0b96af68-0b68-4eac-81c7-2d671bfe54fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1886.601873] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:7f:9f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8140829-5eac-40d8-a10c-eb881f57affc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0b96af68-0b68-4eac-81c7-2d671bfe54fa', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1886.610362] env[62508]: DEBUG oslo.service.loopingcall [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1886.635756] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1886.637067] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-44035c12-88e6-4016-bc5a-2923434c1a9b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.654018] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42353395-c12f-4b21-843d-23a4c404aba8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.661788] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eca89d3-a6b9-4c26-92ca-5d3aa8ee88f0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.666964] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1886.666964] env[62508]: value = "task-1776800" [ 1886.666964] env[62508]: _type = "Task" [ 1886.666964] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.680808] env[62508]: DEBUG nova.compute.provider_tree [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1886.686281] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776800, 'name': CreateVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.747873] env[62508]: DEBUG nova.objects.instance [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lazy-loading 'flavor' on Instance uuid d3829b04-6d1f-44f0-8b94-30b582506ed4 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1886.829447] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52deac5f-46a9-cf7a-e9eb-2d516f4d83e0, 'name': SearchDatastore_Task, 'duration_secs': 0.008409} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.830300] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd24dc36-428d-4004-afb3-8532e236b792 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.835686] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1886.835686] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5277fd70-465e-39fd-522c-53d0c9a53d9b" [ 1886.835686] env[62508]: _type = "Task" [ 1886.835686] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.843647] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5277fd70-465e-39fd-522c-53d0c9a53d9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.927318] env[62508]: DEBUG nova.compute.manager [req-946ca930-56a7-4394-9f62-2f22069fc953 req-e664659d-8f8b-407c-8b75-5e86dd98314c service nova] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Received event network-changed-0b96af68-0b68-4eac-81c7-2d671bfe54fa {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1886.927386] env[62508]: DEBUG nova.compute.manager [req-946ca930-56a7-4394-9f62-2f22069fc953 req-e664659d-8f8b-407c-8b75-5e86dd98314c service nova] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Refreshing instance network info cache due to event network-changed-0b96af68-0b68-4eac-81c7-2d671bfe54fa. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1886.927644] env[62508]: DEBUG oslo_concurrency.lockutils [req-946ca930-56a7-4394-9f62-2f22069fc953 req-e664659d-8f8b-407c-8b75-5e86dd98314c service nova] Acquiring lock "refresh_cache-716fc0ee-9aa7-4d2f-a5e0-024484bbe014" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1886.927836] env[62508]: DEBUG oslo_concurrency.lockutils [req-946ca930-56a7-4394-9f62-2f22069fc953 req-e664659d-8f8b-407c-8b75-5e86dd98314c service nova] Acquired lock "refresh_cache-716fc0ee-9aa7-4d2f-a5e0-024484bbe014" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1886.928081] env[62508]: DEBUG nova.network.neutron [req-946ca930-56a7-4394-9f62-2f22069fc953 req-e664659d-8f8b-407c-8b75-5e86dd98314c service nova] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Refreshing network info cache for port 0b96af68-0b68-4eac-81c7-2d671bfe54fa {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1887.176640] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776800, 'name': CreateVM_Task, 'duration_secs': 0.460222} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.176803] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1887.177537] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1887.177681] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1887.178017] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1887.178288] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bcde953-d843-4cb3-bef7-c00e5070951c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.183722] env[62508]: DEBUG nova.scheduler.client.report [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1887.186830] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1887.186830] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e27b45-8c83-e92e-00c9-328a806f2966" [ 1887.186830] env[62508]: _type = "Task" [ 1887.186830] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.197271] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e27b45-8c83-e92e-00c9-328a806f2966, 'name': SearchDatastore_Task, 'duration_secs': 0.010072} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.197730] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1887.198018] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1887.198299] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1887.253464] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa2e28fe-cd11-43d7-b0c5-6a61b511474e tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.351s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1887.284594] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "09918540-a9ce-4c76-84b9-fbe452d5abf3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1887.284831] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f 
tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "09918540-a9ce-4c76-84b9-fbe452d5abf3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1887.350462] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5277fd70-465e-39fd-522c-53d0c9a53d9b, 'name': SearchDatastore_Task, 'duration_secs': 0.010191} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.350853] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1887.351019] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 48d8f1ee-4d35-4a64-a72a-e4a505675c8f/48d8f1ee-4d35-4a64-a72a-e4a505675c8f.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1887.351315] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1887.351906] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1887.351906] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-03d8168a-433f-4eec-8a84-6f57dfb4a1c8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.354126] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9df4e331-ecb8-4f8c-9d07-0027e7cb8c49 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.360715] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1887.360715] env[62508]: value = "task-1776801" [ 1887.360715] env[62508]: _type = "Task" [ 1887.360715] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.365028] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1887.365215] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1887.366239] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfca2750-a9be-44f5-b8b4-ad68388a510b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.371887] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776801, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.374808] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1887.374808] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525f0220-4201-8260-d756-df065fc7ac31" [ 1887.374808] env[62508]: _type = "Task" [ 1887.374808] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.383345] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525f0220-4201-8260-d756-df065fc7ac31, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.511305] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1c0c4a6a-0a08-42b5-b25a-942cbbdc163d tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1887.511687] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1c0c4a6a-0a08-42b5-b25a-942cbbdc163d tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1887.695714] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.748s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1887.730892] env[62508]: INFO nova.scheduler.client.report [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Deleted allocations for instance de69dbf0-86f1-4b05-a9db-8b9afaabe49c [ 1887.787223] env[62508]: DEBUG nova.compute.manager [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1887.870516] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776801, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.441959} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.873312] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 48d8f1ee-4d35-4a64-a72a-e4a505675c8f/48d8f1ee-4d35-4a64-a72a-e4a505675c8f.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1887.873535] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1887.873798] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-31650566-765b-4cb1-a1e0-a5cb042b0d9b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.884547] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525f0220-4201-8260-d756-df065fc7ac31, 'name': SearchDatastore_Task, 'duration_secs': 0.008395} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.886475] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1887.886475] env[62508]: value = "task-1776802" [ 1887.886475] env[62508]: _type = "Task" [ 1887.886475] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.886684] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-651c0f3c-4321-408b-b721-e5e559760762 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.897159] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1887.897159] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5294422e-bdab-92f4-1c39-485da4d0bb95" [ 1887.897159] env[62508]: _type = "Task" [ 1887.897159] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.900861] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776802, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.909634] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5294422e-bdab-92f4-1c39-485da4d0bb95, 'name': SearchDatastore_Task, 'duration_secs': 0.009356} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.909900] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1887.910183] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 716fc0ee-9aa7-4d2f-a5e0-024484bbe014/716fc0ee-9aa7-4d2f-a5e0-024484bbe014.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1887.910435] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b6073b4f-b569-4389-9bcb-cc69c6433e68 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.917426] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1887.917426] env[62508]: value = "task-1776803" [ 1887.917426] env[62508]: _type = "Task" [ 1887.917426] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.925988] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776803, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.956649] env[62508]: DEBUG nova.network.neutron [req-946ca930-56a7-4394-9f62-2f22069fc953 req-e664659d-8f8b-407c-8b75-5e86dd98314c service nova] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Updated VIF entry in instance network info cache for port 0b96af68-0b68-4eac-81c7-2d671bfe54fa. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1887.956649] env[62508]: DEBUG nova.network.neutron [req-946ca930-56a7-4394-9f62-2f22069fc953 req-e664659d-8f8b-407c-8b75-5e86dd98314c service nova] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Updating instance_info_cache with network_info: [{"id": "0b96af68-0b68-4eac-81c7-2d671bfe54fa", "address": "fa:16:3e:c6:7f:9f", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b96af68-0b", "ovs_interfaceid": "0b96af68-0b68-4eac-81c7-2d671bfe54fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1888.015618] env[62508]: INFO nova.compute.manager [None req-1c0c4a6a-0a08-42b5-b25a-942cbbdc163d tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Detaching volume 464c4a86-7294-4ef5-8433-0176e235cc8b [ 1888.051441] env[62508]: INFO nova.virt.block_device [None req-1c0c4a6a-0a08-42b5-b25a-942cbbdc163d tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Attempting to driver detach volume 464c4a86-7294-4ef5-8433-0176e235cc8b from mountpoint /dev/sdb [ 1888.051598] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c0c4a6a-0a08-42b5-b25a-942cbbdc163d tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Volume detach. 
Driver type: vmdk {{(pid=62508) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1888.051837] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c0c4a6a-0a08-42b5-b25a-942cbbdc163d tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368832', 'volume_id': '464c4a86-7294-4ef5-8433-0176e235cc8b', 'name': 'volume-464c4a86-7294-4ef5-8433-0176e235cc8b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3829b04-6d1f-44f0-8b94-30b582506ed4', 'attached_at': '', 'detached_at': '', 'volume_id': '464c4a86-7294-4ef5-8433-0176e235cc8b', 'serial': '464c4a86-7294-4ef5-8433-0176e235cc8b'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1888.052808] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2658861-4b31-4005-a960-36d2c517480c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.084950] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2af2cb-d136-4b6e-a2f0-dc5be405d3bb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.093631] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33b7bf5-4716-41f5-8925-f56741588e2f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.119576] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e66f8d-5aea-4ecf-9b0f-6472f1ece26d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.137516] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c0c4a6a-0a08-42b5-b25a-942cbbdc163d tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] The volume has not been displaced from its original location: [datastore1] volume-464c4a86-7294-4ef5-8433-0176e235cc8b/volume-464c4a86-7294-4ef5-8433-0176e235cc8b.vmdk. No consolidation needed. 
{{(pid=62508) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1888.143598] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c0c4a6a-0a08-42b5-b25a-942cbbdc163d tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Reconfiguring VM instance instance-00000068 to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1888.144047] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3484012-e824-46a5-86f9-28cc393e5bbf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.164867] env[62508]: DEBUG oslo_vmware.api [None req-1c0c4a6a-0a08-42b5-b25a-942cbbdc163d tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1888.164867] env[62508]: value = "task-1776804" [ 1888.164867] env[62508]: _type = "Task" [ 1888.164867] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.174448] env[62508]: DEBUG oslo_vmware.api [None req-1c0c4a6a-0a08-42b5-b25a-942cbbdc163d tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776804, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.243140] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1429b5c4-218d-4172-8c4d-a21bfe7cf293 tempest-ServerActionsTestOtherA-1355844615 tempest-ServerActionsTestOtherA-1355844615-project-member] Lock "de69dbf0-86f1-4b05-a9db-8b9afaabe49c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.181s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1888.308520] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.308801] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.310631] env[62508]: INFO nova.compute.claims [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1888.400851] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776802, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089372} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.401274] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1888.401874] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2fb980-0e25-4db4-8721-a4238d8504e2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.423729] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 48d8f1ee-4d35-4a64-a72a-e4a505675c8f/48d8f1ee-4d35-4a64-a72a-e4a505675c8f.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1888.424580] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6c98d11-0c77-40fa-9c53-7626a7f7dc97 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.448257] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776803, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.421499} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.449476] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 716fc0ee-9aa7-4d2f-a5e0-024484bbe014/716fc0ee-9aa7-4d2f-a5e0-024484bbe014.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1888.449733] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1888.450066] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1888.450066] env[62508]: value = "task-1776805" [ 1888.450066] env[62508]: _type = "Task" [ 1888.450066] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.450261] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-50833959-fdf7-4039-844f-039396d35f34 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.459969] env[62508]: DEBUG oslo_concurrency.lockutils [req-946ca930-56a7-4394-9f62-2f22069fc953 req-e664659d-8f8b-407c-8b75-5e86dd98314c service nova] Releasing lock "refresh_cache-716fc0ee-9aa7-4d2f-a5e0-024484bbe014" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1888.460355] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776805, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.461591] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1888.461591] env[62508]: value = "task-1776806" [ 1888.461591] env[62508]: _type = "Task" [ 1888.461591] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.469274] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776806, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.640071] env[62508]: DEBUG oslo_concurrency.lockutils [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Acquiring lock "a63241ff-830a-4724-82ef-ad6c8836d2f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.640336] env[62508]: DEBUG oslo_concurrency.lockutils [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Lock "a63241ff-830a-4724-82ef-ad6c8836d2f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.675544] env[62508]: DEBUG oslo_vmware.api [None req-1c0c4a6a-0a08-42b5-b25a-942cbbdc163d tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776804, 'name': ReconfigVM_Task, 'duration_secs': 0.27604} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.676020] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c0c4a6a-0a08-42b5-b25a-942cbbdc163d tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Reconfigured VM instance instance-00000068 to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1888.681284] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8d1d7ae-eb48-41d6-87ed-2b55dd226a57 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.698110] env[62508]: DEBUG oslo_vmware.api [None req-1c0c4a6a-0a08-42b5-b25a-942cbbdc163d tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1888.698110] env[62508]: value = "task-1776807" [ 1888.698110] env[62508]: _type = "Task" [ 1888.698110] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.706382] env[62508]: DEBUG oslo_vmware.api [None req-1c0c4a6a-0a08-42b5-b25a-942cbbdc163d tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776807, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.962616] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776805, 'name': ReconfigVM_Task, 'duration_secs': 0.278255} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.965903] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 48d8f1ee-4d35-4a64-a72a-e4a505675c8f/48d8f1ee-4d35-4a64-a72a-e4a505675c8f.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1888.966548] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ccf5671e-6a1f-4bda-8e9c-d6f552296fc4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.973543] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776806, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074372} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.974732] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1888.975100] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1888.975100] env[62508]: value = "task-1776808" [ 1888.975100] env[62508]: _type = "Task" [ 1888.975100] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.975837] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0010ce2-30a6-482d-9ec6-9f79a42e6f19 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.986038] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776808, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.005412] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 716fc0ee-9aa7-4d2f-a5e0-024484bbe014/716fc0ee-9aa7-4d2f-a5e0-024484bbe014.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1889.005711] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-839c6845-3005-418b-aa45-e232f52c660c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.025097] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1889.025097] env[62508]: value = "task-1776809" [ 1889.025097] env[62508]: _type = "Task" [ 1889.025097] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.033586] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776809, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.142878] env[62508]: DEBUG nova.compute.manager [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1889.208665] env[62508]: DEBUG oslo_vmware.api [None req-1c0c4a6a-0a08-42b5-b25a-942cbbdc163d tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776807, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.440776] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a989e29-45ec-407f-bf63-0c0fdde52464 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.448688] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7950fd06-1453-4e31-93f2-84363b2edf0a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.490931] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d5870f-4df3-4537-a011-58434da95433 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.498697] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776808, 'name': Rename_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.501699] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd79a62-837a-4e39-880d-37ea4030a991 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.514558] env[62508]: DEBUG nova.compute.provider_tree [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1889.535664] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776809, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.662047] env[62508]: DEBUG oslo_concurrency.lockutils [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.712156] env[62508]: DEBUG oslo_vmware.api [None req-1c0c4a6a-0a08-42b5-b25a-942cbbdc163d tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776807, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.992880] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776808, 'name': Rename_Task, 'duration_secs': 0.896043} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.993466] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1889.993899] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2522ad01-8793-4102-a276-271b71f838f8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.001571] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1890.001571] env[62508]: value = "task-1776810" [ 1890.001571] env[62508]: _type = "Task" [ 1890.001571] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.011787] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776810, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.018671] env[62508]: DEBUG nova.scheduler.client.report [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1890.036603] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776809, 'name': ReconfigVM_Task, 'duration_secs': 0.988098} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.036695] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 716fc0ee-9aa7-4d2f-a5e0-024484bbe014/716fc0ee-9aa7-4d2f-a5e0-024484bbe014.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1890.038575] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-364448b3-5175-4ee1-9317-dcbe8907a3fa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.045702] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1890.045702] env[62508]: value = "task-1776811" [ 1890.045702] env[62508]: _type = "Task" [ 1890.045702] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.057844] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776811, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.209241] env[62508]: DEBUG oslo_vmware.api [None req-1c0c4a6a-0a08-42b5-b25a-942cbbdc163d tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776807, 'name': ReconfigVM_Task, 'duration_secs': 1.119947} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.209553] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c0c4a6a-0a08-42b5-b25a-942cbbdc163d tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368832', 'volume_id': '464c4a86-7294-4ef5-8433-0176e235cc8b', 'name': 'volume-464c4a86-7294-4ef5-8433-0176e235cc8b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3829b04-6d1f-44f0-8b94-30b582506ed4', 'attached_at': '', 'detached_at': '', 'volume_id': '464c4a86-7294-4ef5-8433-0176e235cc8b', 'serial': '464c4a86-7294-4ef5-8433-0176e235cc8b'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1890.512392] env[62508]: DEBUG oslo_vmware.api [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776810, 'name': PowerOnVM_Task, 'duration_secs': 0.471511} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.512715] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1890.512915] env[62508]: INFO nova.compute.manager [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Took 8.14 seconds to spawn the instance on the hypervisor. [ 1890.513103] env[62508]: DEBUG nova.compute.manager [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1890.513869] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-133f94ab-ebba-403b-a651-bd7cc704dc65 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.523112] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.214s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.523595] env[62508]: DEBUG nova.compute.manager [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1890.526104] env[62508]: DEBUG oslo_concurrency.lockutils [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.864s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.527440] env[62508]: INFO nova.compute.claims [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1890.556220] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776811, 'name': Rename_Task, 'duration_secs': 0.173824} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.556220] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1890.556385] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-278d5f21-9efd-46c4-bbf5-cccb9732cb7f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.562897] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1890.562897] env[62508]: value = "task-1776812" [ 1890.562897] env[62508]: _type = "Task" [ 1890.562897] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.572504] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776812, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.755977] env[62508]: DEBUG nova.objects.instance [None req-1c0c4a6a-0a08-42b5-b25a-942cbbdc163d tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lazy-loading 'flavor' on Instance uuid d3829b04-6d1f-44f0-8b94-30b582506ed4 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1891.031754] env[62508]: DEBUG nova.compute.utils [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1891.038340] env[62508]: DEBUG nova.compute.manager [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1891.038340] env[62508]: DEBUG nova.network.neutron [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1891.038340] env[62508]: INFO nova.compute.manager [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Took 12.96 seconds to build instance. [ 1891.072830] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776812, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.127088] env[62508]: DEBUG nova.policy [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81273f5ad53746e2bc89a7f2f7b7a727', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86e6f83751b0446fb8f00684082f018a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1891.543594] env[62508]: DEBUG nova.compute.manager [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1891.546805] env[62508]: DEBUG oslo_concurrency.lockutils [None req-dc562d9e-ddda-42f1-8376-34d5a48fcf24 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "48d8f1ee-4d35-4a64-a72a-e4a505675c8f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.472s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.574677] env[62508]: DEBUG oslo_vmware.api [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776812, 'name': PowerOnVM_Task, 'duration_secs': 0.939076} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.575051] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1891.575198] env[62508]: INFO nova.compute.manager [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Took 6.99 seconds to spawn the instance on the hypervisor. 
[ 1891.575492] env[62508]: DEBUG nova.compute.manager [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1891.578667] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6e93c2-60a7-466d-bccd-208b3a50e1b7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.679158] env[62508]: DEBUG nova.network.neutron [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Successfully created port: e356c32d-0aab-4beb-8c5c-58de8aaf9cc4 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1891.688495] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aac4437-2915-4737-9a16-3542eb72e700 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.696920] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6050a7ec-5d5e-42a7-90fd-6c8f757afb34 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.727674] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-902e065e-8b31-4bcb-8e04-4d32edd8627c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.735335] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4e7d1b-efba-4b55-ae41-31a9bd570e19 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.748825] env[62508]: DEBUG nova.compute.provider_tree [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1891.764415] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1c0c4a6a-0a08-42b5-b25a-942cbbdc163d tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.253s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.891918] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.892240] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] 
Lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.104962] env[62508]: INFO nova.compute.manager [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Took 12.88 seconds to build instance. [ 1892.253530] env[62508]: DEBUG nova.scheduler.client.report [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1892.397383] env[62508]: INFO nova.compute.manager [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Detaching volume e98c1101-0885-4ec9-b087-9a571840c785 [ 1892.443040] env[62508]: INFO nova.virt.block_device [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Attempting to driver detach volume e98c1101-0885-4ec9-b087-9a571840c785 from mountpoint /dev/sdc [ 1892.443388] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Volume detach. 
Driver type: vmdk {{(pid=62508) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1892.443673] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368833', 'volume_id': 'e98c1101-0885-4ec9-b087-9a571840c785', 'name': 'volume-e98c1101-0885-4ec9-b087-9a571840c785', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3829b04-6d1f-44f0-8b94-30b582506ed4', 'attached_at': '', 'detached_at': '', 'volume_id': 'e98c1101-0885-4ec9-b087-9a571840c785', 'serial': 'e98c1101-0885-4ec9-b087-9a571840c785'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1892.444490] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13238b38-71ff-400f-a125-80fd89c47f21 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.470072] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ccb7990-2a77-4517-a19b-5bf7f15b80b3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.478861] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24033d25-5f9c-4adf-9a29-a0c02eb63daa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.501999] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60cf7546-5c55-40d4-ad30-ad367b441d2e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.517985] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] The volume has not been displaced from its original location: [datastore1] volume-e98c1101-0885-4ec9-b087-9a571840c785/volume-e98c1101-0885-4ec9-b087-9a571840c785.vmdk. No consolidation needed. 
{{(pid=62508) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1892.523912] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Reconfiguring VM instance instance-00000068 to detach disk 2002 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1892.524360] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f71f4ed-d80e-46d0-83b1-7fb9ab262ac3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.543523] env[62508]: DEBUG oslo_vmware.api [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1892.543523] env[62508]: value = "task-1776813" [ 1892.543523] env[62508]: _type = "Task" [ 1892.543523] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.554641] env[62508]: DEBUG oslo_vmware.api [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776813, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.554641] env[62508]: DEBUG nova.compute.manager [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1892.584483] env[62508]: DEBUG nova.virt.hardware [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1892.584868] env[62508]: DEBUG nova.virt.hardware [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1892.585113] env[62508]: DEBUG nova.virt.hardware [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1892.585439] env[62508]: DEBUG nova.virt.hardware [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1892.585600] env[62508]: DEBUG nova.virt.hardware [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1892.585837] env[62508]: DEBUG nova.virt.hardware [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1892.586143] env[62508]: DEBUG nova.virt.hardware [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1892.586396] env[62508]: DEBUG nova.virt.hardware [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1892.586636] env[62508]: DEBUG nova.virt.hardware [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 
tempest-ServersTestJSON-1349594885-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1892.586890] env[62508]: DEBUG nova.virt.hardware [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1892.587247] env[62508]: DEBUG nova.virt.hardware [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1892.589668] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c9a145-5952-446c-bf37-fd9255f9027c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.595270] env[62508]: DEBUG nova.compute.manager [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Stashing vm_state: active {{(pid=62508) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1892.602216] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0658274-c8de-4b34-818f-bddaec929984 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.606737] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c712b155-1339-4dcd-855b-d980d28baf89 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "716fc0ee-9aa7-4d2f-a5e0-024484bbe014" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.389s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.759784] env[62508]: DEBUG oslo_concurrency.lockutils [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.234s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.760408] env[62508]: DEBUG nova.compute.manager [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1892.931316] env[62508]: DEBUG nova.compute.manager [req-6ba20e7c-f197-450c-84bb-22e0717111b6 req-11676007-49e6-41c1-a47f-4bd1da1dc314 service nova] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Received event network-changed-0b96af68-0b68-4eac-81c7-2d671bfe54fa {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1892.931517] env[62508]: DEBUG nova.compute.manager [req-6ba20e7c-f197-450c-84bb-22e0717111b6 req-11676007-49e6-41c1-a47f-4bd1da1dc314 service nova] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Refreshing instance network info cache due to event network-changed-0b96af68-0b68-4eac-81c7-2d671bfe54fa. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1892.931738] env[62508]: DEBUG oslo_concurrency.lockutils [req-6ba20e7c-f197-450c-84bb-22e0717111b6 req-11676007-49e6-41c1-a47f-4bd1da1dc314 service nova] Acquiring lock "refresh_cache-716fc0ee-9aa7-4d2f-a5e0-024484bbe014" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1892.931882] env[62508]: DEBUG oslo_concurrency.lockutils [req-6ba20e7c-f197-450c-84bb-22e0717111b6 req-11676007-49e6-41c1-a47f-4bd1da1dc314 service nova] Acquired lock "refresh_cache-716fc0ee-9aa7-4d2f-a5e0-024484bbe014" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1892.932528] env[62508]: DEBUG nova.network.neutron [req-6ba20e7c-f197-450c-84bb-22e0717111b6 req-11676007-49e6-41c1-a47f-4bd1da1dc314 service nova] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Refreshing network info cache for port 0b96af68-0b68-4eac-81c7-2d671bfe54fa {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1893.055064] env[62508]: DEBUG oslo_vmware.api [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776813, 'name': ReconfigVM_Task, 'duration_secs': 0.256136} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.055667] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Reconfigured VM instance instance-00000068 to detach disk 2002 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1893.060779] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50e6ab7e-25e9-45f5-8166-22450f62daf1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.080658] env[62508]: DEBUG oslo_vmware.api [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1893.080658] env[62508]: value = "task-1776814" [ 1893.080658] env[62508]: _type = "Task" [ 1893.080658] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.095329] env[62508]: DEBUG oslo_vmware.api [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776814, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.124993] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.124993] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.267031] env[62508]: DEBUG nova.compute.utils [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1893.267216] env[62508]: DEBUG nova.compute.manager [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Not allocating networking since 'none' was specified. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1893.305885] env[62508]: DEBUG nova.compute.manager [req-016479bd-ca58-426b-86aa-6a855345abce req-cd6da804-f428-4080-ad52-a58e98a81527 service nova] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Received event network-vif-plugged-e356c32d-0aab-4beb-8c5c-58de8aaf9cc4 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1893.306101] env[62508]: DEBUG oslo_concurrency.lockutils [req-016479bd-ca58-426b-86aa-6a855345abce req-cd6da804-f428-4080-ad52-a58e98a81527 service nova] Acquiring lock "09918540-a9ce-4c76-84b9-fbe452d5abf3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.306312] env[62508]: DEBUG oslo_concurrency.lockutils [req-016479bd-ca58-426b-86aa-6a855345abce req-cd6da804-f428-4080-ad52-a58e98a81527 service nova] Lock "09918540-a9ce-4c76-84b9-fbe452d5abf3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.306483] env[62508]: DEBUG oslo_concurrency.lockutils [req-016479bd-ca58-426b-86aa-6a855345abce req-cd6da804-f428-4080-ad52-a58e98a81527 service nova] Lock "09918540-a9ce-4c76-84b9-fbe452d5abf3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.306659] env[62508]: DEBUG nova.compute.manager [req-016479bd-ca58-426b-86aa-6a855345abce req-cd6da804-f428-4080-ad52-a58e98a81527 service nova] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] No waiting events found dispatching network-vif-plugged-e356c32d-0aab-4beb-8c5c-58de8aaf9cc4 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1893.306828] env[62508]: WARNING nova.compute.manager [req-016479bd-ca58-426b-86aa-6a855345abce req-cd6da804-f428-4080-ad52-a58e98a81527 service nova] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Received unexpected event network-vif-plugged-e356c32d-0aab-4beb-8c5c-58de8aaf9cc4 for instance with vm_state building and task_state spawning. [ 1893.399607] env[62508]: DEBUG nova.network.neutron [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Successfully updated port: e356c32d-0aab-4beb-8c5c-58de8aaf9cc4 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1893.592278] env[62508]: DEBUG oslo_vmware.api [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776814, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.630716] env[62508]: INFO nova.compute.claims [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1893.741247] env[62508]: DEBUG nova.network.neutron [req-6ba20e7c-f197-450c-84bb-22e0717111b6 req-11676007-49e6-41c1-a47f-4bd1da1dc314 service nova] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Updated VIF entry in instance network info cache for port 0b96af68-0b68-4eac-81c7-2d671bfe54fa. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1893.742051] env[62508]: DEBUG nova.network.neutron [req-6ba20e7c-f197-450c-84bb-22e0717111b6 req-11676007-49e6-41c1-a47f-4bd1da1dc314 service nova] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Updating instance_info_cache with network_info: [{"id": "0b96af68-0b68-4eac-81c7-2d671bfe54fa", "address": "fa:16:3e:c6:7f:9f", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b96af68-0b", "ovs_interfaceid": "0b96af68-0b68-4eac-81c7-2d671bfe54fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1893.769577] env[62508]: DEBUG nova.compute.manager [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1893.901944] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "refresh_cache-09918540-a9ce-4c76-84b9-fbe452d5abf3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1893.902179] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "refresh_cache-09918540-a9ce-4c76-84b9-fbe452d5abf3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1893.902347] env[62508]: DEBUG nova.network.neutron [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1894.094548] env[62508]: DEBUG oslo_vmware.api [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776814, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.139704] env[62508]: INFO nova.compute.resource_tracker [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Updating resource usage from migration 5b2a11ac-2293-49bf-ac02-457b44128118 [ 1894.244170] env[62508]: DEBUG oslo_concurrency.lockutils [req-6ba20e7c-f197-450c-84bb-22e0717111b6 req-11676007-49e6-41c1-a47f-4bd1da1dc314 service nova] Releasing lock "refresh_cache-716fc0ee-9aa7-4d2f-a5e0-024484bbe014" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1894.268324] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73857f41-86e5-41e1-ad98-0d0cf55d7188 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.280533] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06fcd51b-950d-4682-b093-e6ee6520e9d8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.321374] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5122a6d-3b31-4740-81d0-7aee85925af5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.330605] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96eb2be-d97b-4c35-a9a7-9186e0d4c68b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.348319] env[62508]: DEBUG nova.compute.provider_tree [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed in ProviderTree for provider: 
5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1894.440126] env[62508]: DEBUG nova.network.neutron [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1894.556557] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.556814] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.556992] env[62508]: INFO nova.compute.manager [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Shelving [ 1894.594262] env[62508]: DEBUG oslo_vmware.api [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776814, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.595198] env[62508]: DEBUG nova.network.neutron [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Updating instance_info_cache with network_info: [{"id": "e356c32d-0aab-4beb-8c5c-58de8aaf9cc4", "address": "fa:16:3e:5d:cc:09", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape356c32d-0a", "ovs_interfaceid": "e356c32d-0aab-4beb-8c5c-58de8aaf9cc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1894.633661] env[62508]: DEBUG oslo_concurrency.lockutils [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Acquiring lock "1df05ee1-d92d-45be-8337-eba4322bda66" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.633898] env[62508]: DEBUG oslo_concurrency.lockutils [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Lock "1df05ee1-d92d-45be-8337-eba4322bda66" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.789931] env[62508]: DEBUG nova.compute.manager [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1894.816776] env[62508]: DEBUG nova.virt.hardware [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1894.817016] env[62508]: DEBUG nova.virt.hardware [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1894.817225] env[62508]: DEBUG nova.virt.hardware [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1894.817420] env[62508]: DEBUG nova.virt.hardware [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1894.817561] env[62508]: DEBUG nova.virt.hardware [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1894.817702] env[62508]: DEBUG nova.virt.hardware [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1894.817896] env[62508]: DEBUG nova.virt.hardware [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1894.818069] env[62508]: DEBUG nova.virt.hardware [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1894.818231] env[62508]: DEBUG nova.virt.hardware [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 
tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1894.818388] env[62508]: DEBUG nova.virt.hardware [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1894.818552] env[62508]: DEBUG nova.virt.hardware [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1894.819479] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714781a8-e132-4f56-8b0f-a2fc880c730a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.827546] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0aaeec-5176-455d-92ef-d1945a19eec2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.841555] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Instance VIF info [] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1894.847202] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Creating folder: Project (77ec7d6a12a6495c9e953af5a885ee70). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1894.847497] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-21091662-d8a8-4dde-b08e-6665ea7783b5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.852128] env[62508]: DEBUG nova.scheduler.client.report [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1894.858402] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Created folder: Project (77ec7d6a12a6495c9e953af5a885ee70) in parent group-v368536. 
[ 1894.858586] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Creating folder: Instances. Parent ref: group-v368836. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1894.858801] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4df6e2b2-d3e8-43e6-a5de-44c8c7fb8ca6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.868206] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Created folder: Instances in parent group-v368836. [ 1894.868435] env[62508]: DEBUG oslo.service.loopingcall [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1894.868615] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1894.868812] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd0b839f-035b-4e86-b54b-961f23e34c27 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.886139] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1894.886139] env[62508]: value = "task-1776817" [ 1894.886139] env[62508]: _type = "Task" [ 1894.886139] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.893244] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776817, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.065929] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1895.066276] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0110be2d-1dac-4204-a500-6fb42082ba49 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.074801] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1895.074801] env[62508]: value = "task-1776818" [ 1895.074801] env[62508]: _type = "Task" [ 1895.074801] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.086338] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776818, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.097363] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "refresh_cache-09918540-a9ce-4c76-84b9-fbe452d5abf3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1895.097806] env[62508]: DEBUG nova.compute.manager [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Instance network_info: |[{"id": "e356c32d-0aab-4beb-8c5c-58de8aaf9cc4", "address": "fa:16:3e:5d:cc:09", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape356c32d-0a", "ovs_interfaceid": "e356c32d-0aab-4beb-8c5c-58de8aaf9cc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1895.098243] env[62508]: DEBUG oslo_vmware.api [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776814, 'name': ReconfigVM_Task, 'duration_secs': 1.859081} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.098836] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:cc:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec46b14d-3310-4f2b-96c1-f53ee47d3759', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e356c32d-0aab-4beb-8c5c-58de8aaf9cc4', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1895.111860] env[62508]: DEBUG oslo.service.loopingcall [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1895.112230] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368833', 'volume_id': 'e98c1101-0885-4ec9-b087-9a571840c785', 'name': 'volume-e98c1101-0885-4ec9-b087-9a571840c785', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3829b04-6d1f-44f0-8b94-30b582506ed4', 'attached_at': '', 'detached_at': '', 'volume_id': 'e98c1101-0885-4ec9-b087-9a571840c785', 'serial': 'e98c1101-0885-4ec9-b087-9a571840c785'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1895.115367] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1895.115670] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ad631c6-d74e-463a-8eeb-93acc7cc5959 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.137530] env[62508]: DEBUG nova.compute.manager [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1895.145687] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1895.145687] env[62508]: value = "task-1776819" [ 1895.145687] env[62508]: _type = "Task" [ 1895.145687] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.154435] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776819, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.331345] env[62508]: DEBUG nova.compute.manager [req-15f07900-d6ed-40b8-b1ef-a8122f70a850 req-8f398d8c-50d0-465f-a34d-bbfdfa38c41d service nova] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Received event network-changed-e356c32d-0aab-4beb-8c5c-58de8aaf9cc4 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1895.331588] env[62508]: DEBUG nova.compute.manager [req-15f07900-d6ed-40b8-b1ef-a8122f70a850 req-8f398d8c-50d0-465f-a34d-bbfdfa38c41d service nova] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Refreshing instance network info cache due to event network-changed-e356c32d-0aab-4beb-8c5c-58de8aaf9cc4. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1895.331848] env[62508]: DEBUG oslo_concurrency.lockutils [req-15f07900-d6ed-40b8-b1ef-a8122f70a850 req-8f398d8c-50d0-465f-a34d-bbfdfa38c41d service nova] Acquiring lock "refresh_cache-09918540-a9ce-4c76-84b9-fbe452d5abf3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.332044] env[62508]: DEBUG oslo_concurrency.lockutils [req-15f07900-d6ed-40b8-b1ef-a8122f70a850 req-8f398d8c-50d0-465f-a34d-bbfdfa38c41d service nova] Acquired lock "refresh_cache-09918540-a9ce-4c76-84b9-fbe452d5abf3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.332271] env[62508]: DEBUG nova.network.neutron [req-15f07900-d6ed-40b8-b1ef-a8122f70a850 req-8f398d8c-50d0-465f-a34d-bbfdfa38c41d service nova] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Refreshing network info cache for port e356c32d-0aab-4beb-8c5c-58de8aaf9cc4 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1895.356873] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.233s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.357144] env[62508]: INFO nova.compute.manager [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Migrating [ 1895.395812] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776817, 'name': CreateVM_Task, 'duration_secs': 0.298388} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.396021] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1895.396438] env[62508]: DEBUG oslo_concurrency.lockutils [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.396608] env[62508]: DEBUG oslo_concurrency.lockutils [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.396976] env[62508]: DEBUG oslo_concurrency.lockutils [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1895.397242] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52d0992d-7f4d-4996-9875-584e00055757 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.401689] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for the task: (returnval){ [ 1895.401689] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ac7bb1-f596-e677-9a71-f4dacd917e10" [ 1895.401689] env[62508]: _type = "Task" [ 1895.401689] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.409497] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ac7bb1-f596-e677-9a71-f4dacd917e10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.585043] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776818, 'name': PowerOffVM_Task, 'duration_secs': 0.233497} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.585355] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1895.586148] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba04a5d5-b129-4caa-b2cd-f571cb95a2b5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.605294] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3957333-3c87-4615-80f8-65085bd140e1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.655856] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776819, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.657203] env[62508]: DEBUG nova.objects.instance [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lazy-loading 'flavor' on Instance uuid d3829b04-6d1f-44f0-8b94-30b582506ed4 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1895.660824] env[62508]: DEBUG oslo_concurrency.lockutils [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.661067] env[62508]: DEBUG oslo_concurrency.lockutils [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.662511] env[62508]: INFO nova.compute.claims [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1895.873741] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "refresh_cache-48d8f1ee-4d35-4a64-a72a-e4a505675c8f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.873935] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "refresh_cache-48d8f1ee-4d35-4a64-a72a-e4a505675c8f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.874124] env[62508]: DEBUG nova.network.neutron [None 
req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1895.914898] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ac7bb1-f596-e677-9a71-f4dacd917e10, 'name': SearchDatastore_Task, 'duration_secs': 0.009743} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.914898] env[62508]: DEBUG oslo_concurrency.lockutils [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1895.914898] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1895.915123] env[62508]: DEBUG oslo_concurrency.lockutils [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.915123] env[62508]: DEBUG oslo_concurrency.lockutils [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.915308] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1895.915580] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9da3144e-0ff6-4d52-b7e1-0fc521230092 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.925792] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1895.925976] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 
tempest-ServerShowV257Test-128387811-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1895.926715] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24ac8cae-8646-49b9-b1c9-4216f6ad5172 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.932405] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for the task: (returnval){ [ 1895.932405] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ae5e73-5648-4636-233b-e9be3cc9b248" [ 1895.932405] env[62508]: _type = "Task" [ 1895.932405] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.939872] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ae5e73-5648-4636-233b-e9be3cc9b248, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.036414] env[62508]: DEBUG nova.network.neutron [req-15f07900-d6ed-40b8-b1ef-a8122f70a850 req-8f398d8c-50d0-465f-a34d-bbfdfa38c41d service nova] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Updated VIF entry in instance network info cache for port e356c32d-0aab-4beb-8c5c-58de8aaf9cc4. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1896.036862] env[62508]: DEBUG nova.network.neutron [req-15f07900-d6ed-40b8-b1ef-a8122f70a850 req-8f398d8c-50d0-465f-a34d-bbfdfa38c41d service nova] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Updating instance_info_cache with network_info: [{"id": "e356c32d-0aab-4beb-8c5c-58de8aaf9cc4", "address": "fa:16:3e:5d:cc:09", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape356c32d-0a", "ovs_interfaceid": "e356c32d-0aab-4beb-8c5c-58de8aaf9cc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1896.115425] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] 
[instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Creating Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1896.115731] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ff875308-ec38-4979-aaef-8cfa4d3fd6c6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.123773] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1896.123773] env[62508]: value = "task-1776820" [ 1896.123773] env[62508]: _type = "Task" [ 1896.123773] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.132342] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776820, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.157064] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776819, 'name': CreateVM_Task, 'duration_secs': 0.553459} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.157231] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1896.157911] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1896.158096] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1896.158423] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1896.158678] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89d73c11-6288-4d1b-b778-1cc65e0d6ae3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.165128] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1896.165128] env[62508]: value = 
"session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ce41bc-75c2-10ad-3212-7ebc1d012799" [ 1896.165128] env[62508]: _type = "Task" [ 1896.165128] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.176349] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ce41bc-75c2-10ad-3212-7ebc1d012799, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.443070] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ae5e73-5648-4636-233b-e9be3cc9b248, 'name': SearchDatastore_Task, 'duration_secs': 0.011089} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.446207] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a05fdc69-1f27-48bb-80c1-fa4a1cbf008d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.453879] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for the task: (returnval){ [ 1896.453879] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526d0cb3-7fcc-90b7-bd82-e0a07d876321" [ 1896.453879] env[62508]: _type = "Task" [ 1896.453879] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.461248] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526d0cb3-7fcc-90b7-bd82-e0a07d876321, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.539840] env[62508]: DEBUG oslo_concurrency.lockutils [req-15f07900-d6ed-40b8-b1ef-a8122f70a850 req-8f398d8c-50d0-465f-a34d-bbfdfa38c41d service nova] Releasing lock "refresh_cache-09918540-a9ce-4c76-84b9-fbe452d5abf3" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1896.577789] env[62508]: DEBUG nova.network.neutron [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Updating instance_info_cache with network_info: [{"id": "4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88", "address": "fa:16:3e:13:90:11", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ae7fae0-9c", "ovs_interfaceid": "4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1896.634525] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776820, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.665566] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5e2dc5cc-c431-43a1-9008-7a7b198b2ef4 tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.773s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1896.677762] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ce41bc-75c2-10ad-3212-7ebc1d012799, 'name': SearchDatastore_Task, 'duration_secs': 0.010676} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.678070] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1896.678300] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1896.678522] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1896.805052] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d0f2d77-8dc5-4caa-80e0-40ba404ec2c2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.812862] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-625be306-00c9-4484-92fa-46076ad395e5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.845380] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f5f0b7-7730-4459-8996-2083bba82d13 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.855687] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f73196a-0590-40bf-8fb3-0e6541aa3579 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.872258] env[62508]: DEBUG nova.compute.provider_tree [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1896.963478] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]526d0cb3-7fcc-90b7-bd82-e0a07d876321, 'name': SearchDatastore_Task, 'duration_secs': 0.011499} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.963754] env[62508]: DEBUG oslo_concurrency.lockutils [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1896.963994] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a63241ff-830a-4724-82ef-ad6c8836d2f5/a63241ff-830a-4724-82ef-ad6c8836d2f5.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1896.964289] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1896.964481] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1896.964695] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8876e36f-945d-4c61-929b-82c318d8e26d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.966601] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7af64d83-1b1c-442a-9017-869c4d0d405c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.973359] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for the task: (returnval){ [ 1896.973359] env[62508]: value = "task-1776821" [ 1896.973359] env[62508]: _type = "Task" [ 1896.973359] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.976985] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1896.977173] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1896.978102] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa6aa188-9101-418f-81d5-13b2ce6f9cd4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.982892] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776821, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.986105] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1896.986105] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e79c69-a67e-293e-b75c-dededad31e9c" [ 1896.986105] env[62508]: _type = "Task" [ 1896.986105] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.993131] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e79c69-a67e-293e-b75c-dededad31e9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.081234] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "refresh_cache-48d8f1ee-4d35-4a64-a72a-e4a505675c8f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1897.140232] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776820, 'name': CreateSnapshot_Task, 'duration_secs': 0.78457} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.140858] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Created Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1897.141949] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb2b76cd-f9bb-4324-bd7f-86d29c6e7036 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.362902] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1897.363223] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.363474] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "d3829b04-6d1f-44f0-8b94-30b582506ed4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1897.363678] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "d3829b04-6d1f-44f0-8b94-30b582506ed4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.363847] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "d3829b04-6d1f-44f0-8b94-30b582506ed4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1897.366151] env[62508]: INFO nova.compute.manager [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Terminating instance [ 1897.368122] env[62508]: DEBUG nova.compute.manager [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: 
d3829b04-6d1f-44f0-8b94-30b582506ed4] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1897.368903] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1897.369815] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be530e4-c020-43a5-8e05-77c668820f3b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.376138] env[62508]: DEBUG nova.scheduler.client.report [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1897.383307] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1897.383846] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-674ce255-9687-4a61-a459-4220ea1612bf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.391354] env[62508]: DEBUG oslo_vmware.api [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1897.391354] env[62508]: value = "task-1776822" [ 1897.391354] env[62508]: _type = "Task" [ 1897.391354] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.403606] env[62508]: DEBUG oslo_vmware.api [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776822, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.484079] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776821, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495139} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.484364] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a63241ff-830a-4724-82ef-ad6c8836d2f5/a63241ff-830a-4724-82ef-ad6c8836d2f5.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1897.484584] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1897.484846] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b9b620af-e1e1-46ae-ac1c-b62db5fda1d9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.495908] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e79c69-a67e-293e-b75c-dededad31e9c, 'name': SearchDatastore_Task, 'duration_secs': 0.008599} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.497583] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for the task: (returnval){ [ 1897.497583] env[62508]: value = "task-1776823" [ 1897.497583] env[62508]: _type = "Task" [ 1897.497583] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.497812] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e7ce36f-1b16-4b6c-9c82-565c1888e749 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.506606] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1897.506606] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525eee44-9fd3-041b-0005-e46c676d9793" [ 1897.506606] env[62508]: _type = "Task" [ 1897.506606] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.509812] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776823, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.517664] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525eee44-9fd3-041b-0005-e46c676d9793, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.663977] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Creating linked-clone VM from snapshot {{(pid=62508) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1897.664330] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a27e4891-ae61-4135-a012-ac28a49da3b5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.673457] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1897.673457] env[62508]: value = "task-1776824" [ 1897.673457] env[62508]: _type = "Task" [ 1897.673457] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.684542] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776824, 'name': CloneVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.885485] env[62508]: DEBUG oslo_concurrency.lockutils [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.224s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1897.902883] env[62508]: DEBUG oslo_vmware.api [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776822, 'name': PowerOffVM_Task, 'duration_secs': 0.349864} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.903790] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1897.903971] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1897.904322] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22ba36f2-6fa2-4984-9a28-e77349e0b3ff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.013394] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776823, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065755} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.017909] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1898.019033] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5df5bc-b44e-4828-bab8-7720dd142c62 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.030019] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525eee44-9fd3-041b-0005-e46c676d9793, 'name': SearchDatastore_Task, 'duration_secs': 0.010935} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.041556] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1898.041556] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 09918540-a9ce-4c76-84b9-fbe452d5abf3/09918540-a9ce-4c76-84b9-fbe452d5abf3.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1898.057270] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] a63241ff-830a-4724-82ef-ad6c8836d2f5/a63241ff-830a-4724-82ef-ad6c8836d2f5.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1898.059163] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-017f7ce9-71e7-4ed2-896a-429dc88d22d0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.062067] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74cfbfcd-a291-4234-a62a-7f74d9e9167f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.086776] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1898.087148] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1898.087432] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Deleting the datastore file [datastore1] d3829b04-6d1f-44f0-8b94-30b582506ed4 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1898.087811] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47dfbabd-3795-491d-b1df-454d6c9567f9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.095490] env[62508]: DEBUG oslo_vmware.api [None 
req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1898.095490] env[62508]: value = "task-1776826" [ 1898.095490] env[62508]: _type = "Task" [ 1898.095490] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.096481] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for the task: (returnval){ [ 1898.096481] env[62508]: value = "task-1776827" [ 1898.096481] env[62508]: _type = "Task" [ 1898.096481] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.105481] env[62508]: DEBUG oslo_vmware.api [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for the task: (returnval){ [ 1898.105481] env[62508]: value = "task-1776828" [ 1898.105481] env[62508]: _type = "Task" [ 1898.105481] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.116044] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776826, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.116292] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776827, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.121195] env[62508]: DEBUG oslo_vmware.api [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776828, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.184722] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776824, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.388467] env[62508]: DEBUG oslo_concurrency.lockutils [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Acquiring lock "f9e67ab8-71ec-463b-932d-ca0471d3882c" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.388762] env[62508]: DEBUG oslo_concurrency.lockutils [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Lock "f9e67ab8-71ec-463b-932d-ca0471d3882c" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1898.602818] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7102e3d5-5340-4871-b37e-83dfa1e7f01f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.632608] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Updating instance '48d8f1ee-4d35-4a64-a72a-e4a505675c8f' progress to 0 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1898.636608] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776826, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.641567] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776827, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.647010] env[62508]: DEBUG oslo_vmware.api [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Task: {'id': task-1776828, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.291161} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.647342] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1898.647559] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1898.648017] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1898.648253] env[62508]: INFO nova.compute.manager [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Took 1.28 seconds to destroy the instance on the hypervisor. [ 1898.648549] env[62508]: DEBUG oslo.service.loopingcall [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1898.648909] env[62508]: DEBUG nova.compute.manager [-] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1898.648909] env[62508]: DEBUG nova.network.neutron [-] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1898.685787] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776824, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.891933] env[62508]: DEBUG oslo_concurrency.lockutils [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Lock "f9e67ab8-71ec-463b-932d-ca0471d3882c" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: held 0.503s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1898.892659] env[62508]: DEBUG nova.compute.manager [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1899.111573] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776826, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.593924} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.114713] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 09918540-a9ce-4c76-84b9-fbe452d5abf3/09918540-a9ce-4c76-84b9-fbe452d5abf3.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1899.114885] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1899.115493] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776827, 'name': ReconfigVM_Task, 'duration_secs': 0.780254} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.115644] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-909f7a94-b393-49f0-ad43-d96f393f724f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.120055] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Reconfigured VM instance instance-00000072 to attach disk [datastore1] a63241ff-830a-4724-82ef-ad6c8836d2f5/a63241ff-830a-4724-82ef-ad6c8836d2f5.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1899.121125] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-721d3ec6-6ec5-41c7-a027-1a792842e67a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.129183] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1899.129183] env[62508]: value = "task-1776829" [ 1899.129183] env[62508]: _type = "Task" [ 1899.129183] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.131292] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for the task: (returnval){ [ 1899.131292] env[62508]: value = "task-1776830" [ 1899.131292] env[62508]: _type = "Task" [ 1899.131292] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.139085] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1899.142680] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-86260184-ed09-4f5e-a333-feb53292d35d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.144326] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776829, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.148979] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776830, 'name': Rename_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.154280] env[62508]: DEBUG oslo_vmware.api [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1899.154280] env[62508]: value = "task-1776831" [ 1899.154280] env[62508]: _type = "Task" [ 1899.154280] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.165612] env[62508]: DEBUG oslo_vmware.api [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776831, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.185569] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776824, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.288764] env[62508]: DEBUG nova.compute.manager [req-f773c922-1072-4c80-9335-3606ccfe0200 req-d33d6509-f2a9-4fd3-9fe6-c64985ebb9bf service nova] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Received event network-vif-deleted-cfd14da4-5054-4b3f-bb35-eeefcb6843a9 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1899.289099] env[62508]: INFO nova.compute.manager [req-f773c922-1072-4c80-9335-3606ccfe0200 req-d33d6509-f2a9-4fd3-9fe6-c64985ebb9bf service nova] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Neutron deleted interface cfd14da4-5054-4b3f-bb35-eeefcb6843a9; detaching it from the instance and deleting it from the info cache [ 1899.289368] env[62508]: DEBUG nova.network.neutron [req-f773c922-1072-4c80-9335-3606ccfe0200 req-d33d6509-f2a9-4fd3-9fe6-c64985ebb9bf service nova] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1899.398417] env[62508]: DEBUG nova.compute.utils [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1899.400622] env[62508]: DEBUG nova.compute.manager [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1899.401050] env[62508]: DEBUG nova.network.neutron [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1899.455514] env[62508]: DEBUG nova.policy [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '442f51f1866e46d68ff5e7193578f973', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc62fa31b2f241f59bb410df89b334ca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1899.593315] env[62508]: DEBUG oslo_concurrency.lockutils [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "d58f5593-aafc-43e0-a040-96af10659b70" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.593618] env[62508]: DEBUG oslo_concurrency.lockutils [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] 
Lock "d58f5593-aafc-43e0-a040-96af10659b70" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.593811] env[62508]: INFO nova.compute.manager [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Shelving [ 1899.646017] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776829, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073852} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.647796] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1899.648114] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776830, 'name': Rename_Task, 'duration_secs': 0.164165} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.649024] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c81f1d04-ad66-4374-88cf-5223abbd9bcd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.651770] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1899.652025] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0277365e-6571-4958-9ad2-7668307201a5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.679601] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 09918540-a9ce-4c76-84b9-fbe452d5abf3/09918540-a9ce-4c76-84b9-fbe452d5abf3.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1899.679601] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for the task: (returnval){ [ 1899.679601] env[62508]: value = "task-1776832" [ 1899.679601] env[62508]: _type = "Task" [ 1899.679601] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.679601] env[62508]: DEBUG nova.network.neutron [-] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1899.679914] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9440e227-a568-4d39-8b4e-2a993b4fd660 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.698427] env[62508]: INFO nova.compute.manager [-] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Took 1.05 seconds to deallocate network for instance. [ 1899.710667] env[62508]: DEBUG oslo_vmware.api [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776831, 'name': PowerOffVM_Task, 'duration_secs': 0.379296} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.713827] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1899.714037] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Updating instance '48d8f1ee-4d35-4a64-a72a-e4a505675c8f' progress to 17 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1899.719431] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1899.719431] env[62508]: value = "task-1776833" [ 1899.719431] env[62508]: _type = "Task" [ 1899.719431] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.723222] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776824, 'name': CloneVM_Task, 'duration_secs': 1.641728} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.729808] env[62508]: INFO nova.virt.vmwareapi.vmops [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Created linked-clone VM from snapshot [ 1899.730120] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776832, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.730845] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58695226-89a0-44ef-88cd-f6dff5673ced {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.743468] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Uploading image 353a10e7-4163-47d9-8158-bb4b40bd7029 {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1899.745535] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776833, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.779589] env[62508]: DEBUG oslo_vmware.rw_handles [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1899.779589] env[62508]: value = "vm-368841" [ 1899.779589] env[62508]: _type = "VirtualMachine" [ 1899.779589] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1899.779863] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4ec2c9d3-78cf-4f82-8be2-248f1f0bc46d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.787464] env[62508]: DEBUG oslo_vmware.rw_handles [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lease: (returnval){ [ 1899.787464] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521ce057-f06e-8727-96fa-35d5b1f90d47" [ 1899.787464] env[62508]: _type = "HttpNfcLease" [ 1899.787464] env[62508]: } obtained for exporting VM: (result){ [ 1899.787464] env[62508]: value = "vm-368841" [ 1899.787464] env[62508]: _type = "VirtualMachine" [ 1899.787464] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1899.787738] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the lease: (returnval){ [ 1899.787738] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521ce057-f06e-8727-96fa-35d5b1f90d47" [ 1899.787738] env[62508]: _type = "HttpNfcLease" [ 1899.787738] env[62508]: } to be ready. 
{{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1899.792339] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-118961e9-1d81-45b2-8833-1d04799cdf66 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.797378] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1899.797378] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521ce057-f06e-8727-96fa-35d5b1f90d47" [ 1899.797378] env[62508]: _type = "HttpNfcLease" [ 1899.797378] env[62508]: } is initializing. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1899.801079] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4279b5a-f184-4755-b1c2-22b8ac964b5b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.831969] env[62508]: DEBUG nova.compute.manager [req-f773c922-1072-4c80-9335-3606ccfe0200 req-d33d6509-f2a9-4fd3-9fe6-c64985ebb9bf service nova] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Detach interface failed, port_id=cfd14da4-5054-4b3f-bb35-eeefcb6843a9, reason: Instance d3829b04-6d1f-44f0-8b94-30b582506ed4 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1899.905350] env[62508]: DEBUG nova.compute.manager [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1899.983950] env[62508]: DEBUG nova.network.neutron [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Successfully created port: cd7a7d72-5b99-4e7b-a31f-21c68b88e67d {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1900.102111] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1900.102422] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-19982886-b2d5-44e0-870f-296a41e0f663 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.109594] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1900.109594] env[62508]: value = "task-1776835" [ 1900.109594] env[62508]: _type = "Task" [ 1900.109594] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.117614] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776835, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.207761] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776832, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.218985] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1900.219292] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1900.220069] env[62508]: DEBUG nova.objects.instance [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lazy-loading 'resources' on Instance uuid d3829b04-6d1f-44f0-8b94-30b582506ed4 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1900.223446] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1900.223758] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1900.224071] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1900.224293] env[62508]: DEBUG 
nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1900.224525] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1900.224886] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1900.225115] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1900.225367] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1900.225623] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1900.225895] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1900.226183] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1900.236757] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea6fb1c3-074b-4689-9ba8-718509b885cc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.258268] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776833, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.260435] env[62508]: DEBUG oslo_vmware.api [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1900.260435] env[62508]: value = "task-1776836" [ 1900.260435] env[62508]: _type = "Task" [ 1900.260435] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.273730] env[62508]: DEBUG oslo_vmware.api [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776836, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.298234] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1900.298234] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521ce057-f06e-8727-96fa-35d5b1f90d47" [ 1900.298234] env[62508]: _type = "HttpNfcLease" [ 1900.298234] env[62508]: } is ready. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1900.302371] env[62508]: DEBUG oslo_vmware.rw_handles [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1900.302371] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521ce057-f06e-8727-96fa-35d5b1f90d47" [ 1900.302371] env[62508]: _type = "HttpNfcLease" [ 1900.302371] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1900.303918] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24d61e3-802b-4596-9ee5-d9de9fd6d797 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.315993] env[62508]: DEBUG oslo_vmware.rw_handles [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526f714c-0b45-2a3f-4170-fb3f0580fbca/disk-0.vmdk from lease info. {{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1900.316399] env[62508]: DEBUG oslo_vmware.rw_handles [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526f714c-0b45-2a3f-4170-fb3f0580fbca/disk-0.vmdk for reading. 
{{(pid=62508) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1900.465590] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d5119477-b91e-4637-9c3d-4cba36e2b073 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.472631] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0089d4b-841c-41c7-98ef-6d07ae7a5a00 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.479513] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e95262-eafd-45ee-95b8-2e4b45ddbdae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.513434] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27e9353-df75-42ee-9b39-c2f2d4d9657a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.520765] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9329cfe2-a9d7-49df-b719-fafae3b43eb9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.538723] env[62508]: DEBUG nova.compute.provider_tree [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1900.619782] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776835, 'name': PowerOffVM_Task, 'duration_secs': 0.242867} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.623397] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1900.624254] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe45780f-8d0f-4a3b-b264-fe3e13aefcac {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.648184] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750d6929-ec08-42d0-9c29-9dabb77a4d49 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.707817] env[62508]: DEBUG oslo_vmware.api [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776832, 'name': PowerOnVM_Task, 'duration_secs': 0.610135} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.708960] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1900.708960] env[62508]: INFO nova.compute.manager [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Took 5.92 seconds to spawn the instance on the hypervisor. [ 1900.708960] env[62508]: DEBUG nova.compute.manager [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1900.709709] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae30c03-afad-4b73-a142-bb30caca7a76 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.745155] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776833, 'name': ReconfigVM_Task, 'duration_secs': 0.638639} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.745855] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 09918540-a9ce-4c76-84b9-fbe452d5abf3/09918540-a9ce-4c76-84b9-fbe452d5abf3.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1900.746143] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3ce66406-3371-483b-99aa-6114d263e017 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.752604] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1900.752604] env[62508]: value = "task-1776837" [ 1900.752604] env[62508]: _type = "Task" [ 1900.752604] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.761922] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776837, 'name': Rename_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.769818] env[62508]: DEBUG oslo_vmware.api [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776836, 'name': ReconfigVM_Task, 'duration_secs': 0.157928} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.770423] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Updating instance '48d8f1ee-4d35-4a64-a72a-e4a505675c8f' progress to 33 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1900.914958] env[62508]: DEBUG nova.compute.manager [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1900.944253] env[62508]: DEBUG nova.virt.hardware [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1900.944768] env[62508]: DEBUG nova.virt.hardware [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1900.945139] env[62508]: DEBUG nova.virt.hardware [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1900.945528] env[62508]: DEBUG nova.virt.hardware [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1900.945796] env[62508]: DEBUG nova.virt.hardware [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1900.946097] env[62508]: DEBUG nova.virt.hardware [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1900.946482] env[62508]: DEBUG nova.virt.hardware [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1900.946783] env[62508]: DEBUG nova.virt.hardware [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1900.947106] env[62508]: DEBUG nova.virt.hardware [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1900.947391] env[62508]: DEBUG nova.virt.hardware [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1900.947693] env[62508]: DEBUG nova.virt.hardware [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1900.949539] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46cf6a2-368c-4d33-97fe-94c26b8a5a3b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.960192] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ae3b8f-f46e-4e08-89b1-575a8f7d1fd7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.044271] env[62508]: DEBUG nova.scheduler.client.report [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1901.163118] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-60333765-0da4-4e18-ac0c-44e91041d0ea 
tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Creating Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1901.163790] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-29f975cb-593e-43b1-b76e-604331f2962b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.174668] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1901.174668] env[62508]: value = "task-1776838" [ 1901.174668] env[62508]: _type = "Task" [ 1901.174668] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.185598] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776838, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.227543] env[62508]: INFO nova.compute.manager [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Took 11.58 seconds to build instance. [ 1901.263121] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776837, 'name': Rename_Task, 'duration_secs': 0.147956} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.263525] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1901.263920] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34d9d29b-6e8f-406d-8d4a-b6e634603647 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.270067] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1901.270067] env[62508]: value = "task-1776839" [ 1901.270067] env[62508]: _type = "Task" [ 1901.270067] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.276496] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1901.276864] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1901.277141] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1901.278205] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1901.278205] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1901.278205] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1901.278205] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1901.278205] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1901.278382] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Got 1 possible topologies {{(pid=62508) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1901.278650] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1901.278832] env[62508]: DEBUG nova.virt.hardware [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1901.284801] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Reconfiguring VM instance instance-0000006f to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1901.288325] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1e17146-7993-49b8-8d1d-ea16695b4804 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.302234] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776839, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.308506] env[62508]: DEBUG oslo_vmware.api [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1901.308506] env[62508]: value = "task-1776840" [ 1901.308506] env[62508]: _type = "Task" [ 1901.308506] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.318910] env[62508]: DEBUG oslo_vmware.api [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776840, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.404904] env[62508]: DEBUG nova.compute.manager [req-7b90f675-7c48-47dc-acad-2da7dc569db5 req-13de05b7-1bc5-48ef-b0fd-c5080b1395f9 service nova] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Received event network-vif-plugged-cd7a7d72-5b99-4e7b-a31f-21c68b88e67d {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1901.405212] env[62508]: DEBUG oslo_concurrency.lockutils [req-7b90f675-7c48-47dc-acad-2da7dc569db5 req-13de05b7-1bc5-48ef-b0fd-c5080b1395f9 service nova] Acquiring lock "1df05ee1-d92d-45be-8337-eba4322bda66-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1901.405592] env[62508]: DEBUG oslo_concurrency.lockutils [req-7b90f675-7c48-47dc-acad-2da7dc569db5 req-13de05b7-1bc5-48ef-b0fd-c5080b1395f9 service nova] Lock "1df05ee1-d92d-45be-8337-eba4322bda66-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1901.407850] env[62508]: DEBUG oslo_concurrency.lockutils [req-7b90f675-7c48-47dc-acad-2da7dc569db5 req-13de05b7-1bc5-48ef-b0fd-c5080b1395f9 service nova] Lock "1df05ee1-d92d-45be-8337-eba4322bda66-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1901.408173] env[62508]: DEBUG nova.compute.manager [req-7b90f675-7c48-47dc-acad-2da7dc569db5 req-13de05b7-1bc5-48ef-b0fd-c5080b1395f9 service nova] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] No waiting events found dispatching network-vif-plugged-cd7a7d72-5b99-4e7b-a31f-21c68b88e67d {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1901.408457] env[62508]: WARNING nova.compute.manager [req-7b90f675-7c48-47dc-acad-2da7dc569db5 req-13de05b7-1bc5-48ef-b0fd-c5080b1395f9 service nova] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Received unexpected event network-vif-plugged-cd7a7d72-5b99-4e7b-a31f-21c68b88e67d for instance with vm_state building and task_state spawning. 
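[editor's note] The surrounding entries repeatedly show the same poll-until-complete pattern: oslo_vmware's wait_for_task/_poll_task re-reads a vCenter task (ReconfigVM_Task, PowerOnVM_Task, CloneVM_Task, ...) and logs its progress until it reaches a terminal state. The following is only a minimal illustrative sketch of that loop, not the oslo_vmware implementation; get_task_info() is a hypothetical stub standing in for the real property-collector query against vCenter.

    import time

    # Hypothetical stand-in for the vSphere property-collector lookup that the
    # real library performs on each poll; returns (state, progress_percent).
    def get_task_info(task_id):
        # A real deployment would query vCenter here; this stub always succeeds.
        return ("success", 100)

    def wait_for_task(task_id, poll_interval=0.5):
        """Poll a task until it reaches a terminal state, logging progress
        in the same style as the entries above (e.g. 'progress is 6%')."""
        while True:
            state, progress = get_task_info(task_id)
            print(f"Task: {{'id': '{task_id}'}} progress is {progress}%.")
            if state == "success":
                return
            if state == "error":
                raise RuntimeError(f"Task {task_id} failed")
            time.sleep(poll_interval)

    wait_for_task("task-1776840")

In the log, the same loop is what produces the successive "progress is 0%" / "progress is 6%" / "completed successfully" entries for each task id.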
[ 1901.550573] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.331s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1901.560766] env[62508]: DEBUG nova.network.neutron [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Successfully updated port: cd7a7d72-5b99-4e7b-a31f-21c68b88e67d {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1901.580043] env[62508]: INFO nova.scheduler.client.report [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Deleted allocations for instance d3829b04-6d1f-44f0-8b94-30b582506ed4 [ 1901.662548] env[62508]: INFO nova.compute.manager [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Rebuilding instance [ 1901.688714] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776838, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.721821] env[62508]: DEBUG nova.compute.manager [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1901.723592] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ccc8971-8b49-48fd-9f6b-64f083a3ada5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.729269] env[62508]: DEBUG oslo_concurrency.lockutils [None req-07e5962d-946b-4ef9-8ffe-d1c74ce995b0 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Lock "a63241ff-830a-4724-82ef-ad6c8836d2f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.089s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1901.781553] env[62508]: DEBUG oslo_vmware.api [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776839, 'name': PowerOnVM_Task, 'duration_secs': 0.468198} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.781913] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1901.782210] env[62508]: INFO nova.compute.manager [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Took 9.23 seconds to spawn the instance on the hypervisor. [ 1901.782520] env[62508]: DEBUG nova.compute.manager [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1901.783424] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd3a528-29fb-469e-8482-f0ac33f91807 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.818676] env[62508]: DEBUG oslo_vmware.api [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776840, 'name': ReconfigVM_Task, 'duration_secs': 0.184498} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.819052] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Reconfigured VM instance instance-0000006f to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1901.819899] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b21d2d-65bb-46be-889a-1f5191428ee0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.843030] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 48d8f1ee-4d35-4a64-a72a-e4a505675c8f/48d8f1ee-4d35-4a64-a72a-e4a505675c8f.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1901.844222] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec1be64e-22cb-4d8d-b125-360285d603ae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.867146] env[62508]: DEBUG oslo_vmware.api [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1901.867146] env[62508]: value = "task-1776841" [ 1901.867146] env[62508]: _type = "Task" [ 1901.867146] env[62508]: } to 
complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.876145] env[62508]: DEBUG oslo_vmware.api [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776841, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.063770] env[62508]: DEBUG oslo_concurrency.lockutils [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Acquiring lock "refresh_cache-1df05ee1-d92d-45be-8337-eba4322bda66" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1902.064086] env[62508]: DEBUG oslo_concurrency.lockutils [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Acquired lock "refresh_cache-1df05ee1-d92d-45be-8337-eba4322bda66" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1902.064607] env[62508]: DEBUG nova.network.neutron [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1902.093189] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cbe90cba-049c-415c-a737-c92376f7836c tempest-AttachVolumeTestJSON-1329440440 tempest-AttachVolumeTestJSON-1329440440-project-member] Lock "d3829b04-6d1f-44f0-8b94-30b582506ed4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.730s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1902.187255] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776838, 'name': CreateSnapshot_Task, 'duration_secs': 0.754509} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.187848] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Created Snapshot of the VM instance {{(pid=62508) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1902.188814] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-252b8eeb-d66d-48a3-873a-155a46b1d3ce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.237375] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1902.238220] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-91ba52b1-db89-467d-b098-09549f3fce42 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.246716] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for the task: (returnval){ [ 1902.246716] env[62508]: value = "task-1776842" [ 1902.246716] env[62508]: _type = "Task" [ 1902.246716] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.255906] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776842, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.301982] env[62508]: INFO nova.compute.manager [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Took 14.01 seconds to build instance. [ 1902.380468] env[62508]: DEBUG oslo_vmware.api [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776841, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.624517] env[62508]: DEBUG nova.network.neutron [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1902.708286] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Creating linked-clone VM from snapshot {{(pid=62508) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1902.710813] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f3b6295e-94e0-4b56-9dd2-dff4d6f64bbf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.722600] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1902.722600] env[62508]: value = "task-1776843" [ 1902.722600] env[62508]: _type = "Task" [ 1902.722600] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.732596] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776843, 'name': CloneVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.757634] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776842, 'name': PowerOffVM_Task, 'duration_secs': 0.229739} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.757634] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1902.757634] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1902.758740] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d1c45d0-0d33-4bb7-83a2-d6e1c508b7d8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.767539] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1902.767539] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7660910a-2879-47d4-8631-3788303cb616 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.803894] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3128a48b-0834-4d1d-94e8-f291fbb0f57f tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "09918540-a9ce-4c76-84b9-fbe452d5abf3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.519s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1902.834026] env[62508]: DEBUG nova.network.neutron [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Updating instance_info_cache with network_info: [{"id": "cd7a7d72-5b99-4e7b-a31f-21c68b88e67d", "address": "fa:16:3e:ee:36:4f", "network": {"id": "aa1b3b9c-6da2-48a1-9999-f9a3a334f274", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1243728261-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc62fa31b2f241f59bb410df89b334ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd7a7d72-5b", "ovs_interfaceid": "cd7a7d72-5b99-4e7b-a31f-21c68b88e67d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1902.847586] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1902.847879] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1902.848100] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Deleting the datastore file [datastore1] a63241ff-830a-4724-82ef-ad6c8836d2f5 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1902.848787] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47651705-f9d0-4ff6-bfb3-1f6cb83f2c9d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.857471] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for the task: (returnval){ [ 1902.857471] env[62508]: value = "task-1776845" [ 1902.857471] env[62508]: _type = "Task" [ 1902.857471] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.865304] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776845, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.878089] env[62508]: DEBUG oslo_vmware.api [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776841, 'name': ReconfigVM_Task, 'duration_secs': 0.594313} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.878420] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 48d8f1ee-4d35-4a64-a72a-e4a505675c8f/48d8f1ee-4d35-4a64-a72a-e4a505675c8f.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1902.878775] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Updating instance '48d8f1ee-4d35-4a64-a72a-e4a505675c8f' progress to 50 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1903.234217] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776843, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.337145] env[62508]: DEBUG oslo_concurrency.lockutils [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Releasing lock "refresh_cache-1df05ee1-d92d-45be-8337-eba4322bda66" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1903.337546] env[62508]: DEBUG nova.compute.manager [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Instance network_info: |[{"id": "cd7a7d72-5b99-4e7b-a31f-21c68b88e67d", "address": "fa:16:3e:ee:36:4f", "network": {"id": "aa1b3b9c-6da2-48a1-9999-f9a3a334f274", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1243728261-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc62fa31b2f241f59bb410df89b334ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd7a7d72-5b", "ovs_interfaceid": "cd7a7d72-5b99-4e7b-a31f-21c68b88e67d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1903.338045] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] 
Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:36:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ba07329-1d3e-4ba8-8774-d029262318c4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd7a7d72-5b99-4e7b-a31f-21c68b88e67d', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1903.346675] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Creating folder: Project (dc62fa31b2f241f59bb410df89b334ca). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1903.346983] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-80d96f5f-9c6a-4777-8143-2af9ee3a4cf8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.358975] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Created folder: Project (dc62fa31b2f241f59bb410df89b334ca) in parent group-v368536. [ 1903.359379] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Creating folder: Instances. Parent ref: group-v368844. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1903.362784] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58d9a986-9b59-4349-ba00-002652f7efe1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.371987] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776845, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280643} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.372265] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1903.372498] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1903.372702] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1903.376630] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Created folder: Instances in parent group-v368844. [ 1903.376969] env[62508]: DEBUG oslo.service.loopingcall [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1903.377120] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1903.377530] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2da4b281-7558-407c-9c71-77437d506e01 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.396520] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e38614-f47c-4512-be03-7c276910cb26 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.419224] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e490244a-d629-4bde-b9dc-9d94a0203ea8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.422300] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1903.422300] env[62508]: value = "task-1776849" [ 1903.422300] env[62508]: _type = "Task" [ 1903.422300] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.441795] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Updating instance '48d8f1ee-4d35-4a64-a72a-e4a505675c8f' progress to 67 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1903.450123] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776849, 'name': CreateVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.451502] env[62508]: DEBUG nova.compute.manager [req-61026a35-2b80-41f2-afe6-35dabbad9f80 req-f9e6b36c-9aa0-41bb-ac6f-b14b7e11fef2 service nova] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Received event network-changed-cd7a7d72-5b99-4e7b-a31f-21c68b88e67d {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1903.451679] env[62508]: DEBUG nova.compute.manager [req-61026a35-2b80-41f2-afe6-35dabbad9f80 req-f9e6b36c-9aa0-41bb-ac6f-b14b7e11fef2 service nova] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Refreshing instance network info cache due to event network-changed-cd7a7d72-5b99-4e7b-a31f-21c68b88e67d. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1903.451891] env[62508]: DEBUG oslo_concurrency.lockutils [req-61026a35-2b80-41f2-afe6-35dabbad9f80 req-f9e6b36c-9aa0-41bb-ac6f-b14b7e11fef2 service nova] Acquiring lock "refresh_cache-1df05ee1-d92d-45be-8337-eba4322bda66" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1903.452041] env[62508]: DEBUG oslo_concurrency.lockutils [req-61026a35-2b80-41f2-afe6-35dabbad9f80 req-f9e6b36c-9aa0-41bb-ac6f-b14b7e11fef2 service nova] Acquired lock "refresh_cache-1df05ee1-d92d-45be-8337-eba4322bda66" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1903.452208] env[62508]: DEBUG nova.network.neutron [req-61026a35-2b80-41f2-afe6-35dabbad9f80 req-f9e6b36c-9aa0-41bb-ac6f-b14b7e11fef2 service nova] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Refreshing network info cache for port cd7a7d72-5b99-4e7b-a31f-21c68b88e67d {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1903.681196] env[62508]: DEBUG oslo_concurrency.lockutils [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "5029bdd2-1f52-43ec-a978-b788b15a1204" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1903.681484] env[62508]: DEBUG oslo_concurrency.lockutils [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "5029bdd2-1f52-43ec-a978-b788b15a1204" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1903.733545] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea 
tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776843, 'name': CloneVM_Task} progress is 94%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.932690] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776849, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.981804] env[62508]: DEBUG nova.network.neutron [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Port 4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88 binding to destination host cpu-1 is already ACTIVE {{(pid=62508) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1904.184462] env[62508]: DEBUG nova.compute.manager [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1904.188123] env[62508]: DEBUG nova.network.neutron [req-61026a35-2b80-41f2-afe6-35dabbad9f80 req-f9e6b36c-9aa0-41bb-ac6f-b14b7e11fef2 service nova] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Updated VIF entry in instance network info cache for port cd7a7d72-5b99-4e7b-a31f-21c68b88e67d. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1904.188491] env[62508]: DEBUG nova.network.neutron [req-61026a35-2b80-41f2-afe6-35dabbad9f80 req-f9e6b36c-9aa0-41bb-ac6f-b14b7e11fef2 service nova] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Updating instance_info_cache with network_info: [{"id": "cd7a7d72-5b99-4e7b-a31f-21c68b88e67d", "address": "fa:16:3e:ee:36:4f", "network": {"id": "aa1b3b9c-6da2-48a1-9999-f9a3a334f274", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1243728261-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc62fa31b2f241f59bb410df89b334ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd7a7d72-5b", "ovs_interfaceid": "cd7a7d72-5b99-4e7b-a31f-21c68b88e67d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1904.236033] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776843, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.429970] env[62508]: DEBUG nova.virt.hardware [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1904.430290] env[62508]: DEBUG nova.virt.hardware [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1904.430488] env[62508]: DEBUG nova.virt.hardware [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1904.430740] env[62508]: DEBUG nova.virt.hardware [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1904.430933] env[62508]: DEBUG nova.virt.hardware [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1904.431179] env[62508]: DEBUG nova.virt.hardware [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1904.431413] env[62508]: DEBUG nova.virt.hardware [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1904.431614] env[62508]: DEBUG nova.virt.hardware [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1904.431817] env[62508]: DEBUG nova.virt.hardware [None req-5860538f-0d42-4cfe-a30c-745b853709cb 
tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1904.432064] env[62508]: DEBUG nova.virt.hardware [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1904.432296] env[62508]: DEBUG nova.virt.hardware [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1904.433224] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc0dc60-f71c-4f92-9014-19596448027e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.441692] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776849, 'name': CreateVM_Task, 'duration_secs': 0.683076} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.443730] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1904.444512] env[62508]: DEBUG oslo_concurrency.lockutils [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1904.444689] env[62508]: DEBUG oslo_concurrency.lockutils [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1904.445024] env[62508]: DEBUG oslo_concurrency.lockutils [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1904.446271] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90ff648-f785-4914-941c-f914a1a0377c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.450097] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9c474e8-2781-4e69-9f0a-cb32e3179a54 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.455065] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 
tempest-ServerGroupTestJSON-2119981612-project-member] Waiting for the task: (returnval){ [ 1904.455065] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52572576-4aee-2180-a64f-308dfb91f3fc" [ 1904.455065] env[62508]: _type = "Task" [ 1904.455065] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.462991] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Instance VIF info [] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1904.468852] env[62508]: DEBUG oslo.service.loopingcall [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1904.471981] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1904.472244] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd383033-b7a3-4fa3-ae3b-bdff6cc403d9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.493553] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52572576-4aee-2180-a64f-308dfb91f3fc, 'name': SearchDatastore_Task, 'duration_secs': 0.011387} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.493553] env[62508]: DEBUG oslo_concurrency.lockutils [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1904.493743] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1904.493904] env[62508]: DEBUG oslo_concurrency.lockutils [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1904.494022] env[62508]: DEBUG oslo_concurrency.lockutils [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1904.494184] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1904.494457] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e656e6dc-62cd-4c72-b50e-b1bed6bd37b7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.497372] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1904.497372] env[62508]: value = "task-1776850" [ 1904.497372] env[62508]: _type = "Task" [ 1904.497372] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.505470] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776850, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.506557] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1904.506740] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1904.507489] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b855b46-b7d4-4cd5-84f3-d6ba63f32fd2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.512676] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Waiting for the task: (returnval){ [ 1904.512676] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d28446-15cd-3491-3868-b6cf07749c27" [ 1904.512676] env[62508]: _type = "Task" [ 1904.512676] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.520610] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d28446-15cd-3491-3868-b6cf07749c27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.691228] env[62508]: DEBUG oslo_concurrency.lockutils [req-61026a35-2b80-41f2-afe6-35dabbad9f80 req-f9e6b36c-9aa0-41bb-ac6f-b14b7e11fef2 service nova] Releasing lock "refresh_cache-1df05ee1-d92d-45be-8337-eba4322bda66" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1904.711991] env[62508]: DEBUG oslo_concurrency.lockutils [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.712301] env[62508]: DEBUG oslo_concurrency.lockutils [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.713930] env[62508]: INFO nova.compute.claims [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1904.735642] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776843, 'name': CloneVM_Task, 'duration_secs': 1.672127} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.736040] env[62508]: INFO nova.virt.vmwareapi.vmops [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Created linked-clone VM from snapshot [ 1904.736824] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32c85de-86ed-4d0b-992e-012b45e4a29d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.745403] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Uploading image 9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5 {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1904.768701] env[62508]: DEBUG oslo_vmware.rw_handles [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1904.768701] env[62508]: value = "vm-368843" [ 1904.768701] env[62508]: _type = "VirtualMachine" [ 1904.768701] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1904.768949] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-86c563cd-d384-4578-a20f-ae74142df78d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.777579] env[62508]: DEBUG oslo_vmware.rw_handles [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lease: (returnval){ [ 1904.777579] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522c8c38-b0ec-b947-d9a3-d45a6f2ddd3e" [ 1904.777579] env[62508]: _type = "HttpNfcLease" [ 1904.777579] env[62508]: } obtained for exporting VM: (result){ [ 1904.777579] env[62508]: value = "vm-368843" [ 1904.777579] env[62508]: _type = "VirtualMachine" [ 1904.777579] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1904.778154] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the lease: (returnval){ [ 1904.778154] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522c8c38-b0ec-b947-d9a3-d45a6f2ddd3e" [ 1904.778154] env[62508]: _type = "HttpNfcLease" [ 1904.778154] env[62508]: } to be ready. {{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1904.790483] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1904.790483] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522c8c38-b0ec-b947-d9a3-d45a6f2ddd3e" [ 1904.790483] env[62508]: _type = "HttpNfcLease" [ 1904.790483] env[62508]: } is initializing. 
{{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1905.015482] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "48d8f1ee-4d35-4a64-a72a-e4a505675c8f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1905.015482] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "48d8f1ee-4d35-4a64-a72a-e4a505675c8f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1905.015482] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "48d8f1ee-4d35-4a64-a72a-e4a505675c8f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.025187] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776850, 'name': CreateVM_Task, 'duration_secs': 0.380113} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.025837] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1905.026306] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1905.026494] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1905.026824] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1905.027315] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f15399bf-aa3a-4ca1-9114-d966155d3dcc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.033703] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e 
tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52d28446-15cd-3491-3868-b6cf07749c27, 'name': SearchDatastore_Task, 'duration_secs': 0.014649} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.035034] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ebaaa41-2eaf-4d89-80b7-0e82e489296c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.038552] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for the task: (returnval){ [ 1905.038552] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5250caf8-d1de-197f-755c-f2e700e8e594" [ 1905.038552] env[62508]: _type = "Task" [ 1905.038552] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.043588] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Waiting for the task: (returnval){ [ 1905.043588] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e7957e-d85b-8e48-95d4-e75b48a9ab92" [ 1905.043588] env[62508]: _type = "Task" [ 1905.043588] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.050573] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5250caf8-d1de-197f-755c-f2e700e8e594, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.056398] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e7957e-d85b-8e48-95d4-e75b48a9ab92, 'name': SearchDatastore_Task, 'duration_secs': 0.011867} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.056645] env[62508]: DEBUG oslo_concurrency.lockutils [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1905.056901] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 1df05ee1-d92d-45be-8337-eba4322bda66/1df05ee1-d92d-45be-8337-eba4322bda66.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1905.057176] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c801b34-512b-41fd-b183-1475a32903d2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.063150] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Waiting for the task: (returnval){ [ 1905.063150] env[62508]: value = "task-1776852" [ 1905.063150] env[62508]: _type = "Task" [ 1905.063150] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.287137] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1905.287137] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522c8c38-b0ec-b947-d9a3-d45a6f2ddd3e" [ 1905.287137] env[62508]: _type = "HttpNfcLease" [ 1905.287137] env[62508]: } is ready. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1905.287485] env[62508]: DEBUG oslo_vmware.rw_handles [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1905.287485] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]522c8c38-b0ec-b947-d9a3-d45a6f2ddd3e" [ 1905.287485] env[62508]: _type = "HttpNfcLease" [ 1905.287485] env[62508]: }. {{(pid=62508) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1905.288311] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f48c9d27-62b0-4fac-9a5d-157bbe58c7f4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.296757] env[62508]: DEBUG oslo_vmware.rw_handles [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52156793-e1c4-d1c7-2bd9-32b74e09c90d/disk-0.vmdk from lease info. 
{{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1905.296956] env[62508]: DEBUG oslo_vmware.rw_handles [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52156793-e1c4-d1c7-2bd9-32b74e09c90d/disk-0.vmdk for reading. {{(pid=62508) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1905.551621] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5250caf8-d1de-197f-755c-f2e700e8e594, 'name': SearchDatastore_Task, 'duration_secs': 0.012881} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.551963] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1905.552220] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1905.552494] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1905.552748] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1905.553056] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1905.553352] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-643b79b5-6e2d-4538-a7a2-6cfc59281ada {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.570391] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Created directory with path [datastore1] devstack-image-cache_base 
{{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1905.570656] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1905.574373] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1887172a-4923-4657-9e9f-7a294048c7c2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.576950] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Task: {'id': task-1776852, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.582285] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for the task: (returnval){ [ 1905.582285] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520b0351-0f96-837d-d243-a96d2ff1b15b" [ 1905.582285] env[62508]: _type = "Task" [ 1905.582285] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.591249] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520b0351-0f96-837d-d243-a96d2ff1b15b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.647230] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-27da0c00-a472-4b72-9480-5df85b9cc9d0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.864411] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b141f0f5-b2aa-43f0-8fbd-772651af3e5e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.872143] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-769aa6ab-74cb-45f5-bcab-f94cd491499b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.903216] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8785ea8e-e8b5-4e1c-9e8f-0bfe80772f31 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.910742] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c658f6-848e-4636-9349-047e84e86fc2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.924843] env[62508]: DEBUG nova.compute.provider_tree [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1906.069877] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "refresh_cache-48d8f1ee-4d35-4a64-a72a-e4a505675c8f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1906.070079] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "refresh_cache-48d8f1ee-4d35-4a64-a72a-e4a505675c8f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1906.070258] env[62508]: DEBUG nova.network.neutron [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1906.074816] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Task: {'id': task-1776852, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.671448} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.075864] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 1df05ee1-d92d-45be-8337-eba4322bda66/1df05ee1-d92d-45be-8337-eba4322bda66.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1906.075864] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1906.075864] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-362890d8-ebd2-42be-ab2d-4cae1eef1a13 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.082785] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Waiting for the task: (returnval){ [ 1906.082785] env[62508]: value = "task-1776853" [ 1906.082785] env[62508]: _type = "Task" [ 1906.082785] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.096350] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]520b0351-0f96-837d-d243-a96d2ff1b15b, 'name': SearchDatastore_Task, 'duration_secs': 0.066532} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.100388] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Task: {'id': task-1776853, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.100821] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e32d3f2d-cb7f-4a25-b61b-b3f9b31a8c8d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.106750] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for the task: (returnval){ [ 1906.106750] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e86048-a5c7-ee68-e916-29913f62aaef" [ 1906.106750] env[62508]: _type = "Task" [ 1906.106750] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.115641] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e86048-a5c7-ee68-e916-29913f62aaef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.428225] env[62508]: DEBUG nova.scheduler.client.report [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1906.594465] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Task: {'id': task-1776853, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068718} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.594465] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1906.595321] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2279d76c-8b0f-404e-8871-2c6ba6179e6c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.620659] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 1df05ee1-d92d-45be-8337-eba4322bda66/1df05ee1-d92d-45be-8337-eba4322bda66.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1906.624705] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91dd5c8c-ac9b-48f0-affa-d29c1787f8c3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.648154] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e86048-a5c7-ee68-e916-29913f62aaef, 'name': SearchDatastore_Task, 'duration_secs': 0.013882} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.649681] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1906.650488] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a63241ff-830a-4724-82ef-ad6c8836d2f5/a63241ff-830a-4724-82ef-ad6c8836d2f5.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1906.650488] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Waiting for the task: (returnval){ [ 1906.650488] env[62508]: value = "task-1776855" [ 1906.650488] env[62508]: _type = "Task" [ 1906.650488] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.650772] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3527b233-df3e-4c2d-8d60-a2f2223c5554 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.660490] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for the task: (returnval){ [ 1906.660490] env[62508]: value = "task-1776856" [ 1906.660490] env[62508]: _type = "Task" [ 1906.660490] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.664267] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Task: {'id': task-1776855, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.673181] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776856, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.845978] env[62508]: DEBUG nova.network.neutron [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Updating instance_info_cache with network_info: [{"id": "4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88", "address": "fa:16:3e:13:90:11", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ae7fae0-9c", "ovs_interfaceid": "4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1906.933885] env[62508]: DEBUG oslo_concurrency.lockutils [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.221s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1906.934563] env[62508]: DEBUG nova.compute.manager [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1907.165553] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Task: {'id': task-1776855, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.174917] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776856, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.353250] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "refresh_cache-48d8f1ee-4d35-4a64-a72a-e4a505675c8f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1907.440154] env[62508]: DEBUG nova.compute.utils [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1907.441928] env[62508]: DEBUG nova.compute.manager [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1907.442224] env[62508]: DEBUG nova.network.neutron [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1907.486446] env[62508]: DEBUG nova.policy [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81273f5ad53746e2bc89a7f2f7b7a727', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86e6f83751b0446fb8f00684082f018a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1907.667476] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Task: {'id': task-1776855, 'name': ReconfigVM_Task, 'duration_secs': 0.805498} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.670814] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 1df05ee1-d92d-45be-8337-eba4322bda66/1df05ee1-d92d-45be-8337-eba4322bda66.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1907.671555] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-910c3cdc-2937-4c2d-bd11-410ab10ff594 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.678773] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776856, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.69256} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.680253] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a63241ff-830a-4724-82ef-ad6c8836d2f5/a63241ff-830a-4724-82ef-ad6c8836d2f5.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1907.680541] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1907.680859] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Waiting for the task: (returnval){ [ 1907.680859] env[62508]: value = "task-1776857" [ 1907.680859] env[62508]: _type = "Task" [ 1907.680859] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.681183] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eca6798e-a1f1-4edd-8620-e9ae3bfa05fe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.695401] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Task: {'id': task-1776857, 'name': Rename_Task} progress is 10%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.696910] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for the task: (returnval){ [ 1907.696910] env[62508]: value = "task-1776858" [ 1907.696910] env[62508]: _type = "Task" [ 1907.696910] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.705670] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776858, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.747823] env[62508]: DEBUG nova.network.neutron [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Successfully created port: 996a8114-49d9-442e-8799-c2866997e84d {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1907.888024] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9951dbe8-4e46-4c79-a314-c38ffe8ddc04 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.907759] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd22bd6f-81b4-42cf-afb7-dcb8573823e2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.916040] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Updating instance '48d8f1ee-4d35-4a64-a72a-e4a505675c8f' progress to 83 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1907.945861] env[62508]: DEBUG nova.compute.manager [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1908.196099] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Task: {'id': task-1776857, 'name': Rename_Task, 'duration_secs': 0.222254} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.196658] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1908.201385] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-77b77181-8430-44c2-8cba-28a208d17f0b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.210054] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776858, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072239} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.211863] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1908.212433] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Waiting for the task: (returnval){ [ 1908.212433] env[62508]: value = "task-1776859" [ 1908.212433] env[62508]: _type = "Task" [ 1908.212433] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.213481] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a81390-cf4e-45f9-a408-a73cbc0eec87 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.247123] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] a63241ff-830a-4724-82ef-ad6c8836d2f5/a63241ff-830a-4724-82ef-ad6c8836d2f5.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1908.253076] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9e5e2dd-76b5-40d6-a720-8c81e1d5a108 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.276059] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Task: {'id': task-1776859, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.283301] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for the task: (returnval){ [ 1908.283301] env[62508]: value = "task-1776860" [ 1908.283301] env[62508]: _type = "Task" [ 1908.283301] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.294478] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776860, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.424926] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1908.425794] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d5faf40f-8c33-40bc-9fd8-2a59322467ff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.436481] env[62508]: DEBUG oslo_vmware.api [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1908.436481] env[62508]: value = "task-1776861" [ 1908.436481] env[62508]: _type = "Task" [ 1908.436481] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.463978] env[62508]: DEBUG oslo_vmware.api [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776861, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.727064] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Task: {'id': task-1776859, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.795617] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776860, 'name': ReconfigVM_Task, 'duration_secs': 0.509451} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.795952] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Reconfigured VM instance instance-00000072 to attach disk [datastore1] a63241ff-830a-4724-82ef-ad6c8836d2f5/a63241ff-830a-4724-82ef-ad6c8836d2f5.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1908.796627] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-521d1a13-a1d1-4b46-95be-fb703cef73ad {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.802501] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for the task: (returnval){ [ 1908.802501] env[62508]: value = "task-1776862" [ 1908.802501] env[62508]: _type = "Task" [ 1908.802501] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.810232] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776862, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.948641] env[62508]: DEBUG oslo_vmware.api [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776861, 'name': PowerOnVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.966334] env[62508]: DEBUG nova.compute.manager [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1909.228368] env[62508]: DEBUG oslo_vmware.api [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Task: {'id': task-1776859, 'name': PowerOnVM_Task, 'duration_secs': 0.699376} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.228661] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1909.228866] env[62508]: INFO nova.compute.manager [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Took 8.31 seconds to spawn the instance on the hypervisor. 
[ 1909.229057] env[62508]: DEBUG nova.compute.manager [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1909.229843] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5bf332-e707-4f83-b803-8aee56140077 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.313505] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776862, 'name': Rename_Task, 'duration_secs': 0.257149} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.313825] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1909.314121] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-80cc8c83-b6c1-4621-8614-ec37a365252f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.321290] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for the task: (returnval){ [ 1909.321290] env[62508]: value = "task-1776863" [ 1909.321290] env[62508]: _type = "Task" [ 1909.321290] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.330356] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776863, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.448242] env[62508]: DEBUG oslo_vmware.api [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776861, 'name': PowerOnVM_Task, 'duration_secs': 0.602799} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.448461] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1909.448722] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6df152-fe56-4a69-a705-844842f6e2b8 tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Updating instance '48d8f1ee-4d35-4a64-a72a-e4a505675c8f' progress to 100 {{(pid=62508) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1909.748963] env[62508]: INFO nova.compute.manager [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Took 14.11 seconds to build instance. [ 1909.833021] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776863, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.143453] env[62508]: DEBUG nova.virt.hardware [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1910.143808] env[62508]: DEBUG nova.virt.hardware [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1910.143915] env[62508]: DEBUG nova.virt.hardware [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1910.144124] env[62508]: DEBUG nova.virt.hardware [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1910.144274] env[62508]: DEBUG nova.virt.hardware [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 
tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1910.144422] env[62508]: DEBUG nova.virt.hardware [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1910.144629] env[62508]: DEBUG nova.virt.hardware [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1910.144790] env[62508]: DEBUG nova.virt.hardware [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1910.144955] env[62508]: DEBUG nova.virt.hardware [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1910.145132] env[62508]: DEBUG nova.virt.hardware [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1910.145304] env[62508]: DEBUG nova.virt.hardware [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1910.147382] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e39e76-938e-4623-826b-ef88cfbee789 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.156826] env[62508]: DEBUG oslo_vmware.rw_handles [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526f714c-0b45-2a3f-4170-fb3f0580fbca/disk-0.vmdk. 
{{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1910.158134] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c6dbd5-86c4-415a-a594-4b5c943986f8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.162964] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dbb5556-60bc-4703-8db8-fe0ef1be438e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.171361] env[62508]: DEBUG oslo_vmware.rw_handles [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526f714c-0b45-2a3f-4170-fb3f0580fbca/disk-0.vmdk is in state: ready. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1910.171528] env[62508]: ERROR oslo_vmware.rw_handles [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526f714c-0b45-2a3f-4170-fb3f0580fbca/disk-0.vmdk due to incomplete transfer. [ 1910.179448] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-614e6597-97c0-4378-93b8-ff342e10c38f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.196750] env[62508]: DEBUG oslo_vmware.rw_handles [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526f714c-0b45-2a3f-4170-fb3f0580fbca/disk-0.vmdk. 
{{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1910.196951] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Uploaded image 353a10e7-4163-47d9-8158-bb4b40bd7029 to the Glance image server {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1910.199461] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Destroying the VM {{(pid=62508) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1910.199727] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cbf4cee3-c94b-4e94-8e41-4a367272b74a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.206448] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1910.206448] env[62508]: value = "task-1776865" [ 1910.206448] env[62508]: _type = "Task" [ 1910.206448] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.214975] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776865, 'name': Destroy_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.237966] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Acquiring lock "1df05ee1-d92d-45be-8337-eba4322bda66" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.252761] env[62508]: DEBUG oslo_concurrency.lockutils [None req-06ca1f3e-efca-4bc0-8928-e481a6b9f39e tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Lock "1df05ee1-d92d-45be-8337-eba4322bda66" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.619s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.253152] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Lock "1df05ee1-d92d-45be-8337-eba4322bda66" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.015s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.253387] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Acquiring lock "1df05ee1-d92d-45be-8337-eba4322bda66-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.253595] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Lock "1df05ee1-d92d-45be-8337-eba4322bda66-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.253766] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Lock "1df05ee1-d92d-45be-8337-eba4322bda66-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.256044] env[62508]: INFO nova.compute.manager [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Terminating instance [ 1910.257980] env[62508]: DEBUG nova.compute.manager [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1910.258197] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1910.259107] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4152646-5bf9-4880-b316-b5e22d86fe90 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.267431] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1910.267684] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0876f70f-cf89-46ef-9d75-b7f4d12add6e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.274017] env[62508]: DEBUG oslo_vmware.api [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Waiting for the task: (returnval){ [ 1910.274017] env[62508]: value = "task-1776866" [ 1910.274017] env[62508]: _type = "Task" [ 1910.274017] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.282269] env[62508]: DEBUG oslo_vmware.api [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Task: {'id': task-1776866, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.331427] env[62508]: DEBUG oslo_vmware.api [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776863, 'name': PowerOnVM_Task, 'duration_secs': 0.893071} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.331676] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1910.331903] env[62508]: DEBUG nova.compute.manager [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1910.332749] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d4c886-238a-4db0-8620-2eebb7b22944 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.717430] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776865, 'name': Destroy_Task} progress is 33%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.784522] env[62508]: DEBUG oslo_vmware.api [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Task: {'id': task-1776866, 'name': PowerOffVM_Task, 'duration_secs': 0.365024} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.784787] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1910.784960] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1910.785238] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e721c449-544e-473f-a99e-74459e832c22 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.850983] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.851213] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.851383] env[62508]: DEBUG nova.objects.instance [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62508) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1911.219244] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776865, 'name': Destroy_Task} progress is 33%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.384467] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1911.384853] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1911.385267] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Deleting the datastore file [datastore1] 1df05ee1-d92d-45be-8337-eba4322bda66 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1911.385713] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d94901bc-8dbd-40bb-b165-cea09c046c4a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.393891] env[62508]: DEBUG oslo_vmware.api [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Waiting for the task: (returnval){ [ 1911.393891] env[62508]: value = "task-1776868" [ 1911.393891] env[62508]: _type = "Task" [ 1911.393891] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.407792] env[62508]: DEBUG oslo_vmware.api [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Task: {'id': task-1776868, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.438188] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Acquiring lock "a63241ff-830a-4724-82ef-ad6c8836d2f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.438634] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Lock "a63241ff-830a-4724-82ef-ad6c8836d2f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.439020] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Acquiring lock "a63241ff-830a-4724-82ef-ad6c8836d2f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.439352] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Lock "a63241ff-830a-4724-82ef-ad6c8836d2f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.439660] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Lock "a63241ff-830a-4724-82ef-ad6c8836d2f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.442781] env[62508]: INFO nova.compute.manager [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Terminating instance [ 1911.445415] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Acquiring lock "refresh_cache-a63241ff-830a-4724-82ef-ad6c8836d2f5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1911.445686] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Acquired lock "refresh_cache-a63241ff-830a-4724-82ef-ad6c8836d2f5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1911.445978] env[62508]: DEBUG nova.network.neutron [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 
tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1911.720182] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776865, 'name': Destroy_Task, 'duration_secs': 1.439676} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.720459] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Destroyed the VM [ 1911.720728] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Deleting Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1911.720988] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d0d8eaf8-0ac2-41c6-bee0-135dd9357558 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.727717] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1911.727717] env[62508]: value = "task-1776869" [ 1911.727717] env[62508]: _type = "Task" [ 1911.727717] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.736884] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776869, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.807132] env[62508]: DEBUG oslo_concurrency.lockutils [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "48d8f1ee-4d35-4a64-a72a-e4a505675c8f" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.807404] env[62508]: DEBUG oslo_concurrency.lockutils [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "48d8f1ee-4d35-4a64-a72a-e4a505675c8f" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.807601] env[62508]: DEBUG nova.compute.manager [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Going to confirm migration 10 {{(pid=62508) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1911.860110] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5860538f-0d42-4cfe-a30c-745b853709cb tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.904462] env[62508]: DEBUG oslo_vmware.api [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Task: {'id': task-1776868, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.344931} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.904738] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1911.904930] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1911.905121] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1911.905298] env[62508]: INFO nova.compute.manager [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Took 1.65 seconds to destroy the instance on the hypervisor. 
[ 1911.905545] env[62508]: DEBUG oslo.service.loopingcall [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1911.905763] env[62508]: DEBUG nova.compute.manager [-] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1911.905880] env[62508]: DEBUG nova.network.neutron [-] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1911.967443] env[62508]: DEBUG nova.network.neutron [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1912.082076] env[62508]: DEBUG nova.network.neutron [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1912.236123] env[62508]: DEBUG nova.compute.manager [req-32c72e41-15d3-4a72-9225-bb8a6184ce0b req-019a5712-dcb4-4842-8281-d0902409cdc6 service nova] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Received event network-vif-deleted-cd7a7d72-5b99-4e7b-a31f-21c68b88e67d {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1912.236123] env[62508]: INFO nova.compute.manager [req-32c72e41-15d3-4a72-9225-bb8a6184ce0b req-019a5712-dcb4-4842-8281-d0902409cdc6 service nova] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Neutron deleted interface cd7a7d72-5b99-4e7b-a31f-21c68b88e67d; detaching it from the instance and deleting it from the info cache [ 1912.236123] env[62508]: DEBUG nova.network.neutron [req-32c72e41-15d3-4a72-9225-bb8a6184ce0b req-019a5712-dcb4-4842-8281-d0902409cdc6 service nova] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1912.244324] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776869, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.382365] env[62508]: DEBUG oslo_concurrency.lockutils [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "refresh_cache-48d8f1ee-4d35-4a64-a72a-e4a505675c8f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1912.382579] env[62508]: DEBUG oslo_concurrency.lockutils [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquired lock "refresh_cache-48d8f1ee-4d35-4a64-a72a-e4a505675c8f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1912.382780] env[62508]: DEBUG nova.network.neutron [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1912.382980] env[62508]: DEBUG nova.objects.instance [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lazy-loading 'info_cache' on Instance uuid 48d8f1ee-4d35-4a64-a72a-e4a505675c8f {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1912.418732] env[62508]: DEBUG nova.compute.manager [req-269f2187-98f3-4895-98fc-bc9e4b6d4c2b req-a529fc31-bbf4-48b7-bb52-c489bd11a9ed service nova] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Received event network-vif-plugged-996a8114-49d9-442e-8799-c2866997e84d {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1912.418993] env[62508]: DEBUG oslo_concurrency.lockutils [req-269f2187-98f3-4895-98fc-bc9e4b6d4c2b req-a529fc31-bbf4-48b7-bb52-c489bd11a9ed service nova] Acquiring lock "5029bdd2-1f52-43ec-a978-b788b15a1204-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.419249] env[62508]: DEBUG oslo_concurrency.lockutils [req-269f2187-98f3-4895-98fc-bc9e4b6d4c2b req-a529fc31-bbf4-48b7-bb52-c489bd11a9ed service nova] Lock "5029bdd2-1f52-43ec-a978-b788b15a1204-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.419464] env[62508]: DEBUG oslo_concurrency.lockutils [req-269f2187-98f3-4895-98fc-bc9e4b6d4c2b req-a529fc31-bbf4-48b7-bb52-c489bd11a9ed service nova] Lock "5029bdd2-1f52-43ec-a978-b788b15a1204-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1912.419651] env[62508]: DEBUG nova.compute.manager [req-269f2187-98f3-4895-98fc-bc9e4b6d4c2b req-a529fc31-bbf4-48b7-bb52-c489bd11a9ed service nova] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] No waiting events found dispatching network-vif-plugged-996a8114-49d9-442e-8799-c2866997e84d {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1912.419824] env[62508]: WARNING 
nova.compute.manager [req-269f2187-98f3-4895-98fc-bc9e4b6d4c2b req-a529fc31-bbf4-48b7-bb52-c489bd11a9ed service nova] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Received unexpected event network-vif-plugged-996a8114-49d9-442e-8799-c2866997e84d for instance with vm_state building and task_state spawning. [ 1912.585329] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Releasing lock "refresh_cache-a63241ff-830a-4724-82ef-ad6c8836d2f5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1912.585779] env[62508]: DEBUG nova.compute.manager [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1912.585976] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1912.586908] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3674f0f-e674-4872-b2e2-b6099ba2d34a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.596823] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1912.597121] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-845e3688-22d1-43a5-8d16-3b0345fc9c59 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.604941] env[62508]: DEBUG oslo_vmware.api [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for the task: (returnval){ [ 1912.604941] env[62508]: value = "task-1776870" [ 1912.604941] env[62508]: _type = "Task" [ 1912.604941] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.613883] env[62508]: DEBUG oslo_vmware.api [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776870, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.654014] env[62508]: DEBUG nova.network.neutron [-] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1912.740051] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776869, 'name': RemoveSnapshot_Task, 'duration_secs': 0.899219} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.740309] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ee92365-89d1-4c52-986e-8a008f94b687 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.742293] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Deleted Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1912.742716] env[62508]: DEBUG nova.compute.manager [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1912.743467] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5bf9e06-f9dc-4fff-9950-875f567017c1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.757586] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2130b6ab-c74e-43fd-962e-880d7989dce4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.788019] env[62508]: DEBUG nova.compute.manager [req-32c72e41-15d3-4a72-9225-bb8a6184ce0b req-019a5712-dcb4-4842-8281-d0902409cdc6 service nova] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Detach interface failed, port_id=cd7a7d72-5b99-4e7b-a31f-21c68b88e67d, reason: Instance 1df05ee1-d92d-45be-8337-eba4322bda66 could not be found. 
{{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1912.851708] env[62508]: DEBUG nova.network.neutron [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Successfully updated port: 996a8114-49d9-442e-8799-c2866997e84d {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1912.881470] env[62508]: DEBUG nova.compute.manager [req-93c7d127-3f6d-4fd4-9847-b0e1eedddc41 req-9f6856bf-f172-4f4b-82ce-68b4a4237045 service nova] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Received event network-changed-996a8114-49d9-442e-8799-c2866997e84d {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1912.881609] env[62508]: DEBUG nova.compute.manager [req-93c7d127-3f6d-4fd4-9847-b0e1eedddc41 req-9f6856bf-f172-4f4b-82ce-68b4a4237045 service nova] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Refreshing instance network info cache due to event network-changed-996a8114-49d9-442e-8799-c2866997e84d. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1912.881896] env[62508]: DEBUG oslo_concurrency.lockutils [req-93c7d127-3f6d-4fd4-9847-b0e1eedddc41 req-9f6856bf-f172-4f4b-82ce-68b4a4237045 service nova] Acquiring lock "refresh_cache-5029bdd2-1f52-43ec-a978-b788b15a1204" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1912.882202] env[62508]: DEBUG oslo_concurrency.lockutils [req-93c7d127-3f6d-4fd4-9847-b0e1eedddc41 req-9f6856bf-f172-4f4b-82ce-68b4a4237045 service nova] Acquired lock "refresh_cache-5029bdd2-1f52-43ec-a978-b788b15a1204" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1912.882456] env[62508]: DEBUG nova.network.neutron [req-93c7d127-3f6d-4fd4-9847-b0e1eedddc41 req-9f6856bf-f172-4f4b-82ce-68b4a4237045 service nova] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Refreshing network info cache for port 996a8114-49d9-442e-8799-c2866997e84d {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1913.115523] env[62508]: DEBUG oslo_vmware.api [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776870, 'name': PowerOffVM_Task, 'duration_secs': 0.216971} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.115866] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1913.116095] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1913.116377] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1aef6637-09a4-4a55-b285-13deaa13d0af {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.142029] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1913.142292] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1913.142443] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Deleting the datastore file [datastore1] a63241ff-830a-4724-82ef-ad6c8836d2f5 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1913.142799] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0cd11d5f-cc02-49e9-8872-d102b088f850 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.149396] env[62508]: DEBUG oslo_vmware.api [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for the task: (returnval){ [ 1913.149396] env[62508]: value = "task-1776872" [ 1913.149396] env[62508]: _type = "Task" [ 1913.149396] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.158650] env[62508]: INFO nova.compute.manager [-] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Took 1.25 seconds to deallocate network for instance. [ 1913.158964] env[62508]: DEBUG oslo_vmware.api [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776872, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.258405] env[62508]: INFO nova.compute.manager [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Shelve offloading [ 1913.260216] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1913.260491] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11d2c7a8-3dc7-46ed-998f-2946df75b3c8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.267878] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1913.267878] env[62508]: value = "task-1776873" [ 1913.267878] env[62508]: _type = "Task" [ 1913.267878] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.282869] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776873, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.356281] env[62508]: DEBUG oslo_concurrency.lockutils [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "refresh_cache-5029bdd2-1f52-43ec-a978-b788b15a1204" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1913.443176] env[62508]: DEBUG nova.network.neutron [req-93c7d127-3f6d-4fd4-9847-b0e1eedddc41 req-9f6856bf-f172-4f4b-82ce-68b4a4237045 service nova] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1913.573306] env[62508]: DEBUG nova.network.neutron [req-93c7d127-3f6d-4fd4-9847-b0e1eedddc41 req-9f6856bf-f172-4f4b-82ce-68b4a4237045 service nova] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1913.665524] env[62508]: DEBUG oslo_vmware.api [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Task: {'id': task-1776872, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148695} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.666823] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1913.667046] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1913.667235] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1913.667428] env[62508]: INFO nova.compute.manager [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1913.667678] env[62508]: DEBUG oslo.service.loopingcall [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1913.670942] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1913.671193] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1913.671407] env[62508]: DEBUG nova.objects.instance [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Lazy-loading 'resources' on Instance uuid 1df05ee1-d92d-45be-8337-eba4322bda66 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1913.672402] env[62508]: DEBUG nova.compute.manager [-] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1913.672511] env[62508]: DEBUG nova.network.neutron [-] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1913.694266] env[62508]: DEBUG nova.network.neutron [-] [instance: 
a63241ff-830a-4724-82ef-ad6c8836d2f5] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1913.716825] env[62508]: DEBUG nova.network.neutron [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Updating instance_info_cache with network_info: [{"id": "4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88", "address": "fa:16:3e:13:90:11", "network": {"id": "8a661f83-74cd-405e-bb65-276ef82e92e5", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1165669084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e90ec7156574be597a12f4fa0e8c1dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ae7fae0-9c", "ovs_interfaceid": "4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1913.780171] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] VM already powered off {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1913.780171] env[62508]: DEBUG nova.compute.manager [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1913.781110] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7bcd90-79c0-4bc2-bcb5-af31052d030e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.788773] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1913.789025] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquired lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1913.789250] env[62508]: DEBUG 
nova.network.neutron [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1914.077243] env[62508]: DEBUG oslo_concurrency.lockutils [req-93c7d127-3f6d-4fd4-9847-b0e1eedddc41 req-9f6856bf-f172-4f4b-82ce-68b4a4237045 service nova] Releasing lock "refresh_cache-5029bdd2-1f52-43ec-a978-b788b15a1204" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1914.077687] env[62508]: DEBUG oslo_concurrency.lockutils [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "refresh_cache-5029bdd2-1f52-43ec-a978-b788b15a1204" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1914.077847] env[62508]: DEBUG nova.network.neutron [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1914.197955] env[62508]: DEBUG nova.network.neutron [-] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1914.220347] env[62508]: DEBUG oslo_concurrency.lockutils [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Releasing lock "refresh_cache-48d8f1ee-4d35-4a64-a72a-e4a505675c8f" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1914.222982] env[62508]: DEBUG nova.objects.instance [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lazy-loading 'migration_context' on Instance uuid 48d8f1ee-4d35-4a64-a72a-e4a505675c8f {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1914.331275] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6232c9c-78f7-4bbe-a5eb-a110b8431e8d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.339543] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff4f468-9a9d-477a-96d4-db386cd95d30 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.377254] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d79a63-9472-4fb9-b89d-73acd698b77f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.386190] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee1ed3f3-9162-4570-915e-63032ff59bc5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.390665] env[62508]: DEBUG oslo_service.periodic_task [None 
req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1914.390881] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1914.391036] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1914.391179] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Rebuilding the list of instances to heal {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1914.403605] env[62508]: DEBUG nova.compute.provider_tree [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1914.553672] env[62508]: DEBUG nova.network.neutron [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Updating instance_info_cache with network_info: [{"id": "60434e32-b866-43d2-8a33-8925c4459e29", "address": "fa:16:3e:3f:a7:2d", "network": {"id": "7fdcf35b-d562-4926-a8b1-15143df837c1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-791265259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86a46b5a43dd41e48816a8d86e3685b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60434e32-b8", "ovs_interfaceid": "60434e32-b866-43d2-8a33-8925c4459e29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1914.609458] env[62508]: DEBUG nova.network.neutron [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1914.704671] env[62508]: INFO nova.compute.manager [-] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Took 1.03 seconds to deallocate network for instance. [ 1914.726066] env[62508]: DEBUG nova.objects.base [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Object Instance<48d8f1ee-4d35-4a64-a72a-e4a505675c8f> lazy-loaded attributes: info_cache,migration_context {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1914.726066] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce9fa32-e5cb-4fb9-b6de-b2f75e498d47 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.746210] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2fcdafb-97f4-48f4-823a-a035a02103e5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.752852] env[62508]: DEBUG oslo_vmware.api [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1914.752852] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c445e4-9a0f-984b-e2e4-04ba4011eff5" [ 1914.752852] env[62508]: _type = "Task" [ 1914.752852] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.756897] env[62508]: DEBUG nova.network.neutron [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Updating instance_info_cache with network_info: [{"id": "996a8114-49d9-442e-8799-c2866997e84d", "address": "fa:16:3e:38:2f:fd", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap996a8114-49", "ovs_interfaceid": "996a8114-49d9-442e-8799-c2866997e84d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1914.764478] env[62508]: DEBUG oslo_vmware.api [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c445e4-9a0f-984b-e2e4-04ba4011eff5, 'name': 
SearchDatastore_Task, 'duration_secs': 0.009545} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.764750] env[62508]: DEBUG oslo_concurrency.lockutils [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.908117] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Skipping network cache update for instance because it is being deleted. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9946}} [ 1914.908291] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Skipping network cache update for instance because it is being deleted. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9946}} [ 1914.908429] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Skipping network cache update for instance because it is Building. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1914.911327] env[62508]: DEBUG nova.scheduler.client.report [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1914.943519] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "refresh_cache-cd2424b1-3842-4df4-8636-23417833ea49" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1914.943693] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquired lock "refresh_cache-cd2424b1-3842-4df4-8636-23417833ea49" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1914.943852] env[62508]: DEBUG nova.network.neutron [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Forcefully refreshing network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1914.944021] env[62508]: DEBUG nova.objects.instance [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lazy-loading 'info_cache' on Instance uuid cd2424b1-3842-4df4-8636-23417833ea49 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1915.056874] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f 
tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Releasing lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.212771] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.260178] env[62508]: DEBUG oslo_concurrency.lockutils [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "refresh_cache-5029bdd2-1f52-43ec-a978-b788b15a1204" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.260544] env[62508]: DEBUG nova.compute.manager [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Instance network_info: |[{"id": "996a8114-49d9-442e-8799-c2866997e84d", "address": "fa:16:3e:38:2f:fd", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap996a8114-49", "ovs_interfaceid": "996a8114-49d9-442e-8799-c2866997e84d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1915.260995] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:2f:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec46b14d-3310-4f2b-96c1-f53ee47d3759', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '996a8114-49d9-442e-8799-c2866997e84d', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1915.269284] env[62508]: DEBUG oslo.service.loopingcall [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1915.269530] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1915.269766] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3aaf512d-377a-4abd-b59b-3e2b44118c56 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.295597] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1915.295597] env[62508]: value = "task-1776874" [ 1915.295597] env[62508]: _type = "Task" [ 1915.295597] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.301403] env[62508]: DEBUG nova.compute.manager [req-cef9ffa4-790a-42aa-bd9c-5063245dcb1a req-de02d7af-6890-4b48-af6b-b46fccb2106c service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Received event network-vif-unplugged-60434e32-b866-43d2-8a33-8925c4459e29 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1915.301563] env[62508]: DEBUG oslo_concurrency.lockutils [req-cef9ffa4-790a-42aa-bd9c-5063245dcb1a req-de02d7af-6890-4b48-af6b-b46fccb2106c service nova] Acquiring lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.301682] env[62508]: DEBUG oslo_concurrency.lockutils [req-cef9ffa4-790a-42aa-bd9c-5063245dcb1a req-de02d7af-6890-4b48-af6b-b46fccb2106c service nova] Lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.301852] env[62508]: DEBUG oslo_concurrency.lockutils [req-cef9ffa4-790a-42aa-bd9c-5063245dcb1a req-de02d7af-6890-4b48-af6b-b46fccb2106c service nova] Lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.302073] env[62508]: DEBUG nova.compute.manager [req-cef9ffa4-790a-42aa-bd9c-5063245dcb1a req-de02d7af-6890-4b48-af6b-b46fccb2106c service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] No waiting events found dispatching network-vif-unplugged-60434e32-b866-43d2-8a33-8925c4459e29 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1915.302194] env[62508]: WARNING nova.compute.manager [req-cef9ffa4-790a-42aa-bd9c-5063245dcb1a req-de02d7af-6890-4b48-af6b-b46fccb2106c service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Received unexpected event network-vif-unplugged-60434e32-b866-43d2-8a33-8925c4459e29 for instance with vm_state shelved and task_state shelving_offloading. [ 1915.308938] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776874, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.360861] env[62508]: DEBUG oslo_vmware.rw_handles [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52156793-e1c4-d1c7-2bd9-32b74e09c90d/disk-0.vmdk. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1915.361903] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5013f15-ec1c-4ce6-adc7-c8ca1d6e17d0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.370276] env[62508]: DEBUG oslo_vmware.rw_handles [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52156793-e1c4-d1c7-2bd9-32b74e09c90d/disk-0.vmdk is in state: ready. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1915.370592] env[62508]: ERROR oslo_vmware.rw_handles [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52156793-e1c4-d1c7-2bd9-32b74e09c90d/disk-0.vmdk due to incomplete transfer. [ 1915.371257] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1fcd984f-5ee4-4ccc-b086-23b18eaa020b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.381766] env[62508]: DEBUG oslo_vmware.rw_handles [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52156793-e1c4-d1c7-2bd9-32b74e09c90d/disk-0.vmdk. 
{{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1915.382058] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Uploaded image 9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5 to the Glance image server {{(pid=62508) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1915.385934] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Destroying the VM {{(pid=62508) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1915.387330] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1915.387704] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-af82512e-1fa0-464d-9c5a-e77af528fb9c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.390431] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc3cc6e-fb7a-45d2-a48e-a280e4248703 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.401998] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1915.403394] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6b79d67-a5bf-4604-8fa4-f26da8617a1d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.405090] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1915.405090] env[62508]: value = "task-1776875" [ 1915.405090] env[62508]: _type = "Task" [ 1915.405090] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.414605] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776875, 'name': Destroy_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.416438] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.745s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.418955] env[62508]: DEBUG oslo_concurrency.lockutils [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.654s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.440994] env[62508]: INFO nova.scheduler.client.report [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Deleted allocations for instance 1df05ee1-d92d-45be-8337-eba4322bda66 [ 1915.502373] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1915.502747] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1915.502945] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Deleting the datastore file [datastore1] aea987d3-1daf-45f5-84c3-893eb6bdb57a {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1915.503237] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae104910-f797-4d6e-998f-008007a7b335 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.510758] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1915.510758] env[62508]: value = "task-1776877" [ 1915.510758] env[62508]: _type = "Task" [ 1915.510758] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.518873] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776877, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.806675] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776874, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.915644] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776875, 'name': Destroy_Task} progress is 33%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.958086] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1b5c4996-aa26-44f7-b624-00022e8a40a0 tempest-ServerGroupTestJSON-2119981612 tempest-ServerGroupTestJSON-2119981612-project-member] Lock "1df05ee1-d92d-45be-8337-eba4322bda66" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.705s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.022494] env[62508]: DEBUG oslo_vmware.api [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776877, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.432372} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.022713] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1916.022915] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1916.023127] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1916.041912] env[62508]: INFO nova.scheduler.client.report [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Deleted allocations for instance aea987d3-1daf-45f5-84c3-893eb6bdb57a [ 1916.046162] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdead6a4-fb76-4ac0-909b-272cb5a31328 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.054220] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e87f91-c4bc-4277-870a-acbd8852f7ce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.086762] env[62508]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8970b5ba-7d37-4217-b348-26a9531b44e3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.095281] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c5532b-68d1-4529-b8c4-698e790e0bb5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.110391] env[62508]: DEBUG nova.compute.provider_tree [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1916.306369] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776874, 'name': CreateVM_Task, 'duration_secs': 0.660182} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.306532] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1916.307232] env[62508]: DEBUG oslo_concurrency.lockutils [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1916.307402] env[62508]: DEBUG oslo_concurrency.lockutils [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1916.307774] env[62508]: DEBUG oslo_concurrency.lockutils [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1916.308048] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-802f28bb-cf7c-41ba-9f1a-d21baa9b2cb6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.313212] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1916.313212] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5241089d-91a6-c98a-f9d1-f602a10fd86d" [ 1916.313212] env[62508]: _type = "Task" [ 1916.313212] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.321667] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5241089d-91a6-c98a-f9d1-f602a10fd86d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.416293] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776875, 'name': Destroy_Task, 'duration_secs': 0.780979} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.416604] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Destroyed the VM [ 1916.416840] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Deleting Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1916.417072] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-776d0562-b591-4adb-8185-560b7c1f84ea {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.424493] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1916.424493] env[62508]: value = "task-1776878" [ 1916.424493] env[62508]: _type = "Task" [ 1916.424493] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.433969] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776878, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.550783] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.613301] env[62508]: DEBUG nova.scheduler.client.report [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1916.740554] env[62508]: DEBUG nova.network.neutron [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Updating instance_info_cache with network_info: [{"id": "405eb618-22d6-4623-a68c-d19671b3adf1", "address": "fa:16:3e:05:41:ee", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap405eb618-22", "ovs_interfaceid": "405eb618-22d6-4623-a68c-d19671b3adf1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1916.824157] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5241089d-91a6-c98a-f9d1-f602a10fd86d, 'name': SearchDatastore_Task, 'duration_secs': 0.010838} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.824468] env[62508]: DEBUG oslo_concurrency.lockutils [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.824702] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1916.824938] env[62508]: DEBUG oslo_concurrency.lockutils [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1916.825104] env[62508]: DEBUG oslo_concurrency.lockutils [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1916.825276] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1916.825623] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a44457bc-9d48-4582-ab65-a74565815dcc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.835351] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1916.835540] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1916.836445] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e22088a2-3039-49cc-807d-d4583aeff89e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.842408] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1916.842408] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b47c0e-e1dd-db6c-d241-9fa283d84156" [ 1916.842408] env[62508]: _type = "Task" [ 1916.842408] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.852096] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b47c0e-e1dd-db6c-d241-9fa283d84156, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.934964] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776878, 'name': RemoveSnapshot_Task, 'duration_secs': 0.387555} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.935313] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Deleted Snapshot of the VM instance {{(pid=62508) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1916.935597] env[62508]: DEBUG nova.compute.manager [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1916.936419] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea98a381-ee2f-4501-acfc-0960879e6b1b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.243410] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Releasing lock "refresh_cache-cd2424b1-3842-4df4-8636-23417833ea49" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1917.243712] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Updated the network info_cache for instance {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1917.243947] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1917.244131] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1917.244295] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1917.244451] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1917.244669] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1917.244730] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1917.244861] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1917.245024] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1917.356236] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b47c0e-e1dd-db6c-d241-9fa283d84156, 'name': SearchDatastore_Task, 'duration_secs': 0.009361} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.356992] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e31a9760-5931-4336-bfc5-ed83a2c6b802 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.363573] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1917.363573] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5279d481-c9cb-03be-f699-bd2811e10c42" [ 1917.363573] env[62508]: _type = "Task" [ 1917.363573] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.374110] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5279d481-c9cb-03be-f699-bd2811e10c42, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.453906] env[62508]: INFO nova.compute.manager [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Shelve offloading [ 1917.456049] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1917.456298] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3ba59a3a-7f8d-4d32-88a6-7ab94c4e256e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.464567] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1917.464567] env[62508]: value = "task-1776879" [ 1917.464567] env[62508]: _type = "Task" [ 1917.464567] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.473359] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776879, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.513949] env[62508]: DEBUG nova.compute.manager [req-44b6e09d-e7c7-4f0d-8fd4-e4351c2ca70a req-508635c5-dd4b-4c1e-b3ac-617898582ad9 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Received event network-changed-60434e32-b866-43d2-8a33-8925c4459e29 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1917.513949] env[62508]: DEBUG nova.compute.manager [req-44b6e09d-e7c7-4f0d-8fd4-e4351c2ca70a req-508635c5-dd4b-4c1e-b3ac-617898582ad9 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Refreshing instance network info cache due to event network-changed-60434e32-b866-43d2-8a33-8925c4459e29. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1917.514777] env[62508]: DEBUG oslo_concurrency.lockutils [req-44b6e09d-e7c7-4f0d-8fd4-e4351c2ca70a req-508635c5-dd4b-4c1e-b3ac-617898582ad9 service nova] Acquiring lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1917.514777] env[62508]: DEBUG oslo_concurrency.lockutils [req-44b6e09d-e7c7-4f0d-8fd4-e4351c2ca70a req-508635c5-dd4b-4c1e-b3ac-617898582ad9 service nova] Acquired lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1917.514777] env[62508]: DEBUG nova.network.neutron [req-44b6e09d-e7c7-4f0d-8fd4-e4351c2ca70a req-508635c5-dd4b-4c1e-b3ac-617898582ad9 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Refreshing network info cache for port 60434e32-b866-43d2-8a33-8925c4459e29 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1917.625761] env[62508]: DEBUG oslo_concurrency.lockutils [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.207s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.628615] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.416s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.628846] env[62508]: DEBUG nova.objects.instance [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Lazy-loading 'resources' on Instance uuid a63241ff-830a-4724-82ef-ad6c8836d2f5 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1917.749337] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1917.874895] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5279d481-c9cb-03be-f699-bd2811e10c42, 'name': SearchDatastore_Task, 'duration_secs': 0.010949} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.875199] env[62508]: DEBUG oslo_concurrency.lockutils [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1917.875457] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 5029bdd2-1f52-43ec-a978-b788b15a1204/5029bdd2-1f52-43ec-a978-b788b15a1204.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1917.875721] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-856ea56c-370d-499f-a9b1-75807b67f2c2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.882956] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1917.882956] env[62508]: value = "task-1776880" [ 1917.882956] env[62508]: _type = "Task" [ 1917.882956] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.891418] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776880, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.976476] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] VM already powered off {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1917.976732] env[62508]: DEBUG nova.compute.manager [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1917.977593] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b48ed975-36b8-4d2f-94f6-7118f45a305c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.984014] env[62508]: DEBUG oslo_concurrency.lockutils [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1917.984188] env[62508]: DEBUG oslo_concurrency.lockutils [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1917.984500] env[62508]: DEBUG nova.network.neutron [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1918.227157] env[62508]: INFO nova.scheduler.client.report [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Deleted allocation for migration 5b2a11ac-2293-49bf-ac02-457b44128118 [ 1918.286819] env[62508]: DEBUG nova.network.neutron [req-44b6e09d-e7c7-4f0d-8fd4-e4351c2ca70a req-508635c5-dd4b-4c1e-b3ac-617898582ad9 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Updated VIF entry in instance network info cache for port 60434e32-b866-43d2-8a33-8925c4459e29. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1918.287221] env[62508]: DEBUG nova.network.neutron [req-44b6e09d-e7c7-4f0d-8fd4-e4351c2ca70a req-508635c5-dd4b-4c1e-b3ac-617898582ad9 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Updating instance_info_cache with network_info: [{"id": "60434e32-b866-43d2-8a33-8925c4459e29", "address": "fa:16:3e:3f:a7:2d", "network": {"id": "7fdcf35b-d562-4926-a8b1-15143df837c1", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-791265259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86a46b5a43dd41e48816a8d86e3685b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap60434e32-b8", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1918.312250] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec3ec87-2b4f-467b-bfd0-1d9f3fd818a6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.331844] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a78e34fc-8426-45f0-9d48-5b98aea57440 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.373620] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa83a19-d8d8-4596-b866-c3a7a23b9521 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.383603] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79ad91c-c660-4cb3-8a29-be62e081b441 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.397833] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776880, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506523} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.405879] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 5029bdd2-1f52-43ec-a978-b788b15a1204/5029bdd2-1f52-43ec-a978-b788b15a1204.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1918.406159] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1918.406994] env[62508]: DEBUG nova.compute.provider_tree [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1918.408362] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2979aee4-9d30-46f7-9153-df5bd88fced6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.416532] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1918.416532] env[62508]: value = "task-1776881" [ 1918.416532] env[62508]: _type = "Task" [ 1918.416532] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.427182] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776881, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.740280] env[62508]: DEBUG oslo_concurrency.lockutils [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "48d8f1ee-4d35-4a64-a72a-e4a505675c8f" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.933s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.785324] env[62508]: DEBUG nova.network.neutron [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Updating instance_info_cache with network_info: [{"id": "8fad22e1-6bfd-45d7-89b1-d953be11abbc", "address": "fa:16:3e:6e:f6:0b", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fad22e1-6b", "ovs_interfaceid": "8fad22e1-6bfd-45d7-89b1-d953be11abbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1918.794609] env[62508]: DEBUG oslo_concurrency.lockutils [req-44b6e09d-e7c7-4f0d-8fd4-e4351c2ca70a req-508635c5-dd4b-4c1e-b3ac-617898582ad9 service nova] Releasing lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1918.911847] env[62508]: DEBUG nova.scheduler.client.report [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1918.926453] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776881, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06726} 
completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.927249] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1918.928016] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11210bb-6b6a-4ed7-8d11-dda1de4865ed {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.951639] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 5029bdd2-1f52-43ec-a978-b788b15a1204/5029bdd2-1f52-43ec-a978-b788b15a1204.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1918.952161] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fc9b120-d848-4600-bd79-6b1b802966c1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.971992] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1918.971992] env[62508]: value = "task-1776882" [ 1918.971992] env[62508]: _type = "Task" [ 1918.971992] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.980976] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776882, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.287728] env[62508]: DEBUG oslo_concurrency.lockutils [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1919.416458] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.788s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.419262] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.868s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.419553] env[62508]: DEBUG nova.objects.instance [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lazy-loading 'resources' on Instance uuid aea987d3-1daf-45f5-84c3-893eb6bdb57a {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1919.435271] env[62508]: INFO nova.scheduler.client.report [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Deleted allocations for instance a63241ff-830a-4724-82ef-ad6c8836d2f5 [ 1919.484614] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776882, 'name': ReconfigVM_Task, 'duration_secs': 0.28299} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.484942] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 5029bdd2-1f52-43ec-a978-b788b15a1204/5029bdd2-1f52-43ec-a978-b788b15a1204.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1919.485640] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fb2b2100-cb35-4658-b476-790941ac9659 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.497390] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1919.497390] env[62508]: value = "task-1776883" [ 1919.497390] env[62508]: _type = "Task" [ 1919.497390] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.505469] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776883, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.570854] env[62508]: DEBUG nova.compute.manager [req-f098dc5d-f226-46b3-85a3-7b36df2839c8 req-8cd712ab-ad03-4b9e-917b-3459627ccd8f service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Received event network-vif-unplugged-8fad22e1-6bfd-45d7-89b1-d953be11abbc {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1919.571122] env[62508]: DEBUG oslo_concurrency.lockutils [req-f098dc5d-f226-46b3-85a3-7b36df2839c8 req-8cd712ab-ad03-4b9e-917b-3459627ccd8f service nova] Acquiring lock "d58f5593-aafc-43e0-a040-96af10659b70-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.571345] env[62508]: DEBUG oslo_concurrency.lockutils [req-f098dc5d-f226-46b3-85a3-7b36df2839c8 req-8cd712ab-ad03-4b9e-917b-3459627ccd8f service nova] Lock "d58f5593-aafc-43e0-a040-96af10659b70-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.571529] env[62508]: DEBUG oslo_concurrency.lockutils [req-f098dc5d-f226-46b3-85a3-7b36df2839c8 req-8cd712ab-ad03-4b9e-917b-3459627ccd8f service nova] Lock "d58f5593-aafc-43e0-a040-96af10659b70-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.571718] env[62508]: DEBUG nova.compute.manager [req-f098dc5d-f226-46b3-85a3-7b36df2839c8 req-8cd712ab-ad03-4b9e-917b-3459627ccd8f service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] No waiting events found dispatching network-vif-unplugged-8fad22e1-6bfd-45d7-89b1-d953be11abbc {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1919.571858] env[62508]: WARNING nova.compute.manager [req-f098dc5d-f226-46b3-85a3-7b36df2839c8 req-8cd712ab-ad03-4b9e-917b-3459627ccd8f service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Received unexpected event network-vif-unplugged-8fad22e1-6bfd-45d7-89b1-d953be11abbc for instance with vm_state shelved and task_state shelving_offloading. 
[ 1919.574636] env[62508]: DEBUG oslo_concurrency.lockutils [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.660132] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1919.661137] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f8401d-1b11-4401-b367-a13db23c6b28 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.668853] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1919.669128] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-57957dfe-b278-47f9-8745-57f27e17f53c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.745441] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1919.745784] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1919.745873] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Deleting the datastore file [datastore1] d58f5593-aafc-43e0-a040-96af10659b70 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1919.746168] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-56677ddf-23c0-409d-b686-468d4f05d9ac {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.752956] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1919.752956] env[62508]: value = "task-1776885" [ 1919.752956] env[62508]: _type = "Task" [ 1919.752956] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.760924] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776885, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.828647] env[62508]: DEBUG oslo_concurrency.lockutils [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "48d8f1ee-4d35-4a64-a72a-e4a505675c8f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.828919] env[62508]: DEBUG oslo_concurrency.lockutils [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "48d8f1ee-4d35-4a64-a72a-e4a505675c8f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.829155] env[62508]: DEBUG oslo_concurrency.lockutils [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "48d8f1ee-4d35-4a64-a72a-e4a505675c8f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.829340] env[62508]: DEBUG oslo_concurrency.lockutils [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "48d8f1ee-4d35-4a64-a72a-e4a505675c8f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.829510] env[62508]: DEBUG oslo_concurrency.lockutils [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "48d8f1ee-4d35-4a64-a72a-e4a505675c8f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.831743] env[62508]: INFO nova.compute.manager [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Terminating instance [ 1919.833424] env[62508]: DEBUG nova.compute.manager [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1919.833616] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1919.834461] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05381f26-cfb1-48eb-bc1c-4ec6e70947f9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.842653] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1919.842933] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70517c98-c4ab-476f-9575-a270c982d066 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.849819] env[62508]: DEBUG oslo_vmware.api [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1919.849819] env[62508]: value = "task-1776886" [ 1919.849819] env[62508]: _type = "Task" [ 1919.849819] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.860339] env[62508]: DEBUG oslo_vmware.api [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776886, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.922289] env[62508]: DEBUG nova.objects.instance [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lazy-loading 'numa_topology' on Instance uuid aea987d3-1daf-45f5-84c3-893eb6bdb57a {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1919.943048] env[62508]: DEBUG oslo_concurrency.lockutils [None req-3495c440-1409-41f1-908c-0cf5e313cf09 tempest-ServerShowV257Test-128387811 tempest-ServerShowV257Test-128387811-project-member] Lock "a63241ff-830a-4724-82ef-ad6c8836d2f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.504s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.008734] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776883, 'name': Rename_Task, 'duration_secs': 0.196697} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.009015] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1920.009295] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dfe2e80e-3608-47fa-ba89-4b7def6c7967 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.019599] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1920.019599] env[62508]: value = "task-1776887" [ 1920.019599] env[62508]: _type = "Task" [ 1920.019599] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.028361] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776887, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.263364] env[62508]: DEBUG oslo_vmware.api [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776885, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147613} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.263573] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1920.263790] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1920.264040] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1920.279953] env[62508]: INFO nova.scheduler.client.report [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Deleted allocations for instance d58f5593-aafc-43e0-a040-96af10659b70 [ 1920.360232] env[62508]: DEBUG oslo_vmware.api [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776886, 'name': PowerOffVM_Task, 'duration_secs': 0.205311} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.360545] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1920.360725] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1920.360963] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d7f4fa9-4df2-45c3-af80-01c5fa45b548 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.424949] env[62508]: DEBUG nova.objects.base [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1920.504898] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58587ad0-ec21-4223-a11b-1655a0ed53eb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.514630] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f8eb6980-9b8f-477f-b8e0-0de44a9e3a47 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.547427] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5aa3dd0-432d-41ff-8a5c-f622862ece1b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.553229] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776887, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.558830] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1949e121-2557-45d4-966c-cf0d80c22972 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.575781] env[62508]: DEBUG nova.compute.provider_tree [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1920.784334] env[62508]: DEBUG oslo_concurrency.lockutils [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1921.032749] env[62508]: DEBUG oslo_vmware.api [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776887, 'name': PowerOnVM_Task, 'duration_secs': 0.663848} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.033094] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1921.033317] env[62508]: INFO nova.compute.manager [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Took 12.07 seconds to spawn the instance on the hypervisor. 
[ 1921.033500] env[62508]: DEBUG nova.compute.manager [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1921.034866] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09482aed-cc0f-48cb-9e7d-fc2047c04175 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.079426] env[62508]: DEBUG nova.scheduler.client.report [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1921.513429] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1921.513646] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1921.513852] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Deleting the datastore file [datastore1] 48d8f1ee-4d35-4a64-a72a-e4a505675c8f {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1921.514162] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-23de2216-3e92-4c41-9a70-99e54c2753f1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.521130] env[62508]: DEBUG oslo_vmware.api [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for the task: (returnval){ [ 1921.521130] env[62508]: value = "task-1776889" [ 1921.521130] env[62508]: _type = "Task" [ 1921.521130] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.529472] env[62508]: DEBUG oslo_vmware.api [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776889, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.552565] env[62508]: INFO nova.compute.manager [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Took 16.86 seconds to build instance. [ 1921.583580] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.164s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1921.585977] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.837s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1921.586649] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1921.586649] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1921.586649] env[62508]: DEBUG oslo_concurrency.lockutils [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.802s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1921.586897] env[62508]: DEBUG nova.objects.instance [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lazy-loading 'resources' on Instance uuid d58f5593-aafc-43e0-a040-96af10659b70 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1921.588446] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d541962e-d140-46cc-a393-40c8df91e7f6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.598641] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b5db512-741f-482d-a0df-665c21a4bac9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.605101] env[62508]: DEBUG nova.compute.manager [req-55e4093a-c748-49f5-aef2-c4583e141b8c req-45a2f494-1698-42c1-8e53-329adcfdcda7 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Received event network-changed-8fad22e1-6bfd-45d7-89b1-d953be11abbc {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1921.605347] env[62508]: 
DEBUG nova.compute.manager [req-55e4093a-c748-49f5-aef2-c4583e141b8c req-45a2f494-1698-42c1-8e53-329adcfdcda7 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Refreshing instance network info cache due to event network-changed-8fad22e1-6bfd-45d7-89b1-d953be11abbc. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1921.605604] env[62508]: DEBUG oslo_concurrency.lockutils [req-55e4093a-c748-49f5-aef2-c4583e141b8c req-45a2f494-1698-42c1-8e53-329adcfdcda7 service nova] Acquiring lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.605790] env[62508]: DEBUG oslo_concurrency.lockutils [req-55e4093a-c748-49f5-aef2-c4583e141b8c req-45a2f494-1698-42c1-8e53-329adcfdcda7 service nova] Acquired lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1921.605970] env[62508]: DEBUG nova.network.neutron [req-55e4093a-c748-49f5-aef2-c4583e141b8c req-45a2f494-1698-42c1-8e53-329adcfdcda7 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Refreshing network info cache for port 8fad22e1-6bfd-45d7-89b1-d953be11abbc {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1921.619751] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0fb3441-9bb9-4db0-96a4-44842de53d9c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.628270] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae134c37-a944-4954-9a42-3c24fd0c5e1d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.663968] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179450MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1921.664170] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.031452] env[62508]: DEBUG oslo_vmware.api [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Task: {'id': task-1776889, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162877} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.031868] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1922.031978] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1922.032130] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1922.032319] env[62508]: INFO nova.compute.manager [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Took 2.20 seconds to destroy the instance on the hypervisor. [ 1922.032562] env[62508]: DEBUG oslo.service.loopingcall [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1922.032745] env[62508]: DEBUG nova.compute.manager [-] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1922.032988] env[62508]: DEBUG nova.network.neutron [-] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1922.054398] env[62508]: DEBUG oslo_concurrency.lockutils [None req-75038c45-7474-4af6-8ad0-4ee431e29bf5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "5029bdd2-1f52-43ec-a978-b788b15a1204" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.373s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1922.093118] env[62508]: DEBUG nova.objects.instance [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lazy-loading 'numa_topology' on Instance uuid d58f5593-aafc-43e0-a040-96af10659b70 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1922.094460] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ba6f91a4-ef21-4120-96e5-a2314dd50a5f tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 27.537s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1922.095397] env[62508]: DEBUG oslo_concurrency.lockutils [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 2.520s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.095397] env[62508]: INFO nova.compute.manager [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Unshelving [ 1922.322330] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "d58f5593-aafc-43e0-a040-96af10659b70" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.372706] env[62508]: DEBUG nova.network.neutron [req-55e4093a-c748-49f5-aef2-c4583e141b8c req-45a2f494-1698-42c1-8e53-329adcfdcda7 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Updated VIF entry in instance network info cache for port 8fad22e1-6bfd-45d7-89b1-d953be11abbc. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1922.373160] env[62508]: DEBUG nova.network.neutron [req-55e4093a-c748-49f5-aef2-c4583e141b8c req-45a2f494-1698-42c1-8e53-329adcfdcda7 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Updating instance_info_cache with network_info: [{"id": "8fad22e1-6bfd-45d7-89b1-d953be11abbc", "address": "fa:16:3e:6e:f6:0b", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap8fad22e1-6b", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1922.597046] env[62508]: DEBUG nova.objects.base [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1922.689652] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23163ad3-e040-42b2-868a-f6d842486703 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.697664] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09588a66-9634-4c10-bbe2-9697f567e2e5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.729443] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d491bc-9c00-4b22-94d7-2b1cd7410ca2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.736877] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f573c6-5f42-44c2-b757-60f79df07e09 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.750902] env[62508]: DEBUG nova.compute.provider_tree [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1922.775025] env[62508]: DEBUG nova.network.neutron [-] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1922.875743] env[62508]: DEBUG oslo_concurrency.lockutils [req-55e4093a-c748-49f5-aef2-c4583e141b8c req-45a2f494-1698-42c1-8e53-329adcfdcda7 service nova] Releasing lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1923.052684] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "5029bdd2-1f52-43ec-a978-b788b15a1204" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.053046] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "5029bdd2-1f52-43ec-a978-b788b15a1204" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.053372] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "5029bdd2-1f52-43ec-a978-b788b15a1204-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.053676] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "5029bdd2-1f52-43ec-a978-b788b15a1204-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.053960] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "5029bdd2-1f52-43ec-a978-b788b15a1204-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.056195] env[62508]: INFO nova.compute.manager [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Terminating instance [ 1923.057830] env[62508]: DEBUG nova.compute.manager [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1923.058034] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1923.058869] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-226873aa-9e16-44e9-96ef-5f4f5535f9e8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.068039] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1923.068039] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6304f1be-066d-48c0-90c6-a1ba41904893 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.074256] env[62508]: DEBUG oslo_vmware.api [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1923.074256] env[62508]: value = "task-1776890" [ 1923.074256] env[62508]: _type = "Task" [ 1923.074256] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.082043] env[62508]: DEBUG oslo_vmware.api [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776890, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.119521] env[62508]: DEBUG oslo_concurrency.lockutils [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.253996] env[62508]: DEBUG nova.scheduler.client.report [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1923.276784] env[62508]: INFO nova.compute.manager [-] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Took 1.24 seconds to deallocate network for instance. [ 1923.585234] env[62508]: DEBUG oslo_vmware.api [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776890, 'name': PowerOffVM_Task, 'duration_secs': 0.19795} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.585503] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1923.585676] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1923.585927] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7251d2e8-09a8-4404-a46c-462addae0a97 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.629524] env[62508]: DEBUG nova.compute.manager [req-1ef28132-ae9b-486f-9bc7-be0102c60210 req-42a36ec4-6d7d-48ff-83c8-b11fb20c2ee6 service nova] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Received event network-vif-deleted-4ae7fae0-9c0f-40d4-b0a2-be02b7b5ac88 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1923.759072] env[62508]: DEBUG oslo_concurrency.lockutils [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.172s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.762587] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.098s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.776880] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1923.776880] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1923.776880] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleting the datastore file [datastore1] 5029bdd2-1f52-43ec-a978-b788b15a1204 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1923.777156] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cfadd735-af26-41c8-b953-4fe006fce642 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.783754] env[62508]: DEBUG oslo_concurrency.lockutils [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.784941] env[62508]: DEBUG oslo_vmware.api [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1923.784941] env[62508]: value = "task-1776892" [ 1923.784941] env[62508]: _type = "Task" [ 1923.784941] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.793144] env[62508]: DEBUG oslo_vmware.api [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776892, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.270278] env[62508]: DEBUG oslo_concurrency.lockutils [None req-60333765-0da4-4e18-ac0c-44e91041d0ea tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "d58f5593-aafc-43e0-a040-96af10659b70" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 24.676s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1924.271128] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "d58f5593-aafc-43e0-a040-96af10659b70" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.948s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1924.271128] env[62508]: INFO nova.compute.manager [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Unshelving [ 1924.296823] env[62508]: DEBUG oslo_vmware.api [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776892, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139708} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.297087] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1924.297281] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1924.297496] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1924.297673] env[62508]: INFO nova.compute.manager [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1924.297916] env[62508]: DEBUG oslo.service.loopingcall [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1924.298359] env[62508]: DEBUG nova.compute.manager [-] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1924.298460] env[62508]: DEBUG nova.network.neutron [-] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1924.536958] env[62508]: DEBUG nova.compute.manager [req-b8ef85ac-3417-485e-bb38-3c7bdfc3399c req-ef9519ef-f6eb-4e06-89ce-567533298bec service nova] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Received event network-vif-deleted-996a8114-49d9-442e-8799-c2866997e84d {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1924.537198] env[62508]: INFO nova.compute.manager [req-b8ef85ac-3417-485e-bb38-3c7bdfc3399c req-ef9519ef-f6eb-4e06-89ce-567533298bec service nova] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Neutron deleted interface 996a8114-49d9-442e-8799-c2866997e84d; detaching it from the instance and deleting it from the info cache [ 1924.537286] env[62508]: DEBUG nova.network.neutron [req-b8ef85ac-3417-485e-bb38-3c7bdfc3399c req-ef9519ef-f6eb-4e06-89ce-567533298bec service nova] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1924.790531] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance cd2424b1-3842-4df4-8636-23417833ea49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1924.790695] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 716fc0ee-9aa7-4d2f-a5e0-024484bbe014 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1924.790880] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 09918540-a9ce-4c76-84b9-fbe452d5abf3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1924.791016] env[62508]: WARNING nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 48d8f1ee-4d35-4a64-a72a-e4a505675c8f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1924.791148] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 5029bdd2-1f52-43ec-a978-b788b15a1204 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1925.006234] env[62508]: DEBUG nova.network.neutron [-] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1925.041149] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3cb9ae4e-36e5-4e79-9e75-b6b79eb104f5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.050435] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5bf802f-0b7c-438c-978c-3615066f69c1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.077323] env[62508]: DEBUG nova.compute.manager [req-b8ef85ac-3417-485e-bb38-3c7bdfc3399c req-ef9519ef-f6eb-4e06-89ce-567533298bec service nova] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Detach interface failed, port_id=996a8114-49d9-442e-8799-c2866997e84d, reason: Instance 5029bdd2-1f52-43ec-a978-b788b15a1204 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1925.292174] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1925.294027] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance aea987d3-1daf-45f5-84c3-893eb6bdb57a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1925.294263] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Instance with task_state "unshelving" is not being actively managed by this compute host but has allocations referencing this compute node (5d5b4923-a8ac-4688-9f86-2405bd3406a9): {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocations during the task state transition. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1780}} [ 1925.294464] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1925.294607] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1925.379611] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4347a7ea-fe72-4f35-bc5c-d2613212167a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.388221] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7cd769-e956-42b4-97d1-66e600c2c7d0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.418827] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08dfc2f-1c75-41e3-b63c-c200958d9614 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.426373] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50b87c3-1fc0-4962-b176-afe0b25fe959 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.439631] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1925.508275] env[62508]: INFO nova.compute.manager [-] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Took 1.21 seconds to deallocate network for instance. [ 1925.961021] env[62508]: ERROR nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [req-750bfe94-5c0e-41fa-bf18-f730d5b24c8a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-750bfe94-5c0e-41fa-bf18-f730d5b24c8a"}]} [ 1925.977206] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1925.989525] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1925.989787] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1926.000532] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1926.014244] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1926.017825] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1926.094833] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c45b9d8-d80d-4ffe-97f8-96190ac1094a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.102852] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc6a20f-93ad-42dd-aa72-1c99e495ecfc {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.131908] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04f7876-5935-4e8f-90fd-238fde3709e5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.138937] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-565df17f-c5fd-4785-b6df-9de96fc702ac {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.152808] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1926.683152] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 188 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1926.683455] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 188 to 189 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1926.683530] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1927.188229] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1927.188427] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.426s {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1927.188696] env[62508]: DEBUG oslo_concurrency.lockutils [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.069s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.188913] env[62508]: DEBUG nova.objects.instance [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lazy-loading 'pci_requests' on Instance uuid aea987d3-1daf-45f5-84c3-893eb6bdb57a {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1927.693029] env[62508]: DEBUG nova.objects.instance [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lazy-loading 'numa_topology' on Instance uuid aea987d3-1daf-45f5-84c3-893eb6bdb57a {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1928.195776] env[62508]: INFO nova.compute.claims [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1929.285026] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd701b5c-9d00-48ed-9b1e-adea24e10598 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.292859] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709213db-fe69-4822-a67f-d2c859286955 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.322931] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d670f7a-7ec6-46b9-b700-f8fdf5a87959 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.330013] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eaa3735-32db-449e-a84f-000255cbafe8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.342838] env[62508]: DEBUG nova.compute.provider_tree [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1929.846058] env[62508]: DEBUG nova.scheduler.client.report [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1930.352232] env[62508]: DEBUG oslo_concurrency.lockutils [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.163s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1930.354255] env[62508]: DEBUG oslo_concurrency.lockutils [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.571s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1930.355048] env[62508]: DEBUG oslo_concurrency.lockutils [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1930.356648] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.065s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1930.356816] env[62508]: DEBUG nova.objects.instance [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lazy-loading 'pci_requests' on Instance uuid d58f5593-aafc-43e0-a040-96af10659b70 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1930.372955] env[62508]: INFO nova.scheduler.client.report [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 tempest-DeleteServersTestJSON-66528752-project-member] Deleted allocations for instance 48d8f1ee-4d35-4a64-a72a-e4a505675c8f [ 1930.385434] env[62508]: INFO nova.network.neutron [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Updating port 60434e32-b866-43d2-8a33-8925c4459e29 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1930.861155] env[62508]: DEBUG nova.objects.instance [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lazy-loading 'numa_topology' on Instance uuid d58f5593-aafc-43e0-a040-96af10659b70 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1930.880503] env[62508]: DEBUG oslo_concurrency.lockutils [None req-708f8920-1dd8-4743-9dd5-9af0df9a11ff tempest-DeleteServersTestJSON-66528752 
tempest-DeleteServersTestJSON-66528752-project-member] Lock "48d8f1ee-4d35-4a64-a72a-e4a505675c8f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.051s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.363523] env[62508]: INFO nova.compute.claims [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1931.393016] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eecb82b5-2d44-4fd3-902c-69d307c16614 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "716fc0ee-9aa7-4d2f-a5e0-024484bbe014" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1931.393016] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eecb82b5-2d44-4fd3-902c-69d307c16614 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "716fc0ee-9aa7-4d2f-a5e0-024484bbe014" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1931.393016] env[62508]: DEBUG nova.compute.manager [None req-eecb82b5-2d44-4fd3-902c-69d307c16614 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1931.393016] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67178db2-6c1a-489b-971a-72f92210bc3d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.399426] env[62508]: DEBUG nova.compute.manager [None req-eecb82b5-2d44-4fd3-902c-69d307c16614 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62508) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1931.400078] env[62508]: DEBUG nova.objects.instance [None req-eecb82b5-2d44-4fd3-902c-69d307c16614 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lazy-loading 'flavor' on Instance uuid 716fc0ee-9aa7-4d2f-a5e0-024484bbe014 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1931.770170] env[62508]: DEBUG nova.compute.manager [req-a49c8fdb-a6a0-4ed8-a9bf-5b9e4bac7249 req-a34bda46-1e21-44a3-bdff-08cadbfb5a46 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Received event network-vif-plugged-60434e32-b866-43d2-8a33-8925c4459e29 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1931.770170] env[62508]: DEBUG oslo_concurrency.lockutils [req-a49c8fdb-a6a0-4ed8-a9bf-5b9e4bac7249 req-a34bda46-1e21-44a3-bdff-08cadbfb5a46 service nova] Acquiring lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1931.770170] env[62508]: DEBUG oslo_concurrency.lockutils [req-a49c8fdb-a6a0-4ed8-a9bf-5b9e4bac7249 req-a34bda46-1e21-44a3-bdff-08cadbfb5a46 service nova] Lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1931.770170] env[62508]: DEBUG oslo_concurrency.lockutils [req-a49c8fdb-a6a0-4ed8-a9bf-5b9e4bac7249 req-a34bda46-1e21-44a3-bdff-08cadbfb5a46 service nova] Lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.770170] env[62508]: DEBUG nova.compute.manager [req-a49c8fdb-a6a0-4ed8-a9bf-5b9e4bac7249 req-a34bda46-1e21-44a3-bdff-08cadbfb5a46 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] No waiting events found dispatching network-vif-plugged-60434e32-b866-43d2-8a33-8925c4459e29 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1931.770170] env[62508]: WARNING nova.compute.manager [req-a49c8fdb-a6a0-4ed8-a9bf-5b9e4bac7249 req-a34bda46-1e21-44a3-bdff-08cadbfb5a46 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Received unexpected event network-vif-plugged-60434e32-b866-43d2-8a33-8925c4459e29 for instance with vm_state shelved_offloaded and task_state spawning. [ 1931.850037] env[62508]: DEBUG oslo_concurrency.lockutils [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1931.850263] env[62508]: DEBUG oslo_concurrency.lockutils [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquired lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1931.850448] env[62508]: DEBUG nova.network.neutron [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1931.905087] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-eecb82b5-2d44-4fd3-902c-69d307c16614 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1931.905087] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48777ff2-4da3-42ce-8138-75687fa93df6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.915709] env[62508]: DEBUG 
oslo_vmware.api [None req-eecb82b5-2d44-4fd3-902c-69d307c16614 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1931.915709] env[62508]: value = "task-1776894" [ 1931.915709] env[62508]: _type = "Task" [ 1931.915709] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.925837] env[62508]: DEBUG oslo_vmware.api [None req-eecb82b5-2d44-4fd3-902c-69d307c16614 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776894, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.427288] env[62508]: DEBUG oslo_vmware.api [None req-eecb82b5-2d44-4fd3-902c-69d307c16614 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776894, 'name': PowerOffVM_Task, 'duration_secs': 0.216757} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.429664] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-eecb82b5-2d44-4fd3-902c-69d307c16614 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1932.429854] env[62508]: DEBUG nova.compute.manager [None req-eecb82b5-2d44-4fd3-902c-69d307c16614 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1932.430833] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e95fb51-ffdf-4b86-981a-bfe3cad4edc4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.462888] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4dbe2d2-0ca2-4329-a9df-dd5006e4055e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.471383] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b888a7a2-9f42-49e5-b348-a9d78193fda6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.520413] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde88aa8-9631-4358-9449-c0a6e2d75a69 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.529791] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7bc62e-4ea1-416b-a5e9-8580ab0d6f02 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.544381] env[62508]: DEBUG nova.compute.provider_tree [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Inventory has not changed in ProviderTree for provider: 
5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1932.621294] env[62508]: DEBUG nova.network.neutron [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Updating instance_info_cache with network_info: [{"id": "60434e32-b866-43d2-8a33-8925c4459e29", "address": "fa:16:3e:3f:a7:2d", "network": {"id": "7fdcf35b-d562-4926-a8b1-15143df837c1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-791265259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86a46b5a43dd41e48816a8d86e3685b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60434e32-b8", "ovs_interfaceid": "60434e32-b866-43d2-8a33-8925c4459e29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1932.944736] env[62508]: DEBUG oslo_concurrency.lockutils [None req-eecb82b5-2d44-4fd3-902c-69d307c16614 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "716fc0ee-9aa7-4d2f-a5e0-024484bbe014" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.554s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1933.047370] env[62508]: DEBUG nova.scheduler.client.report [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1933.123944] env[62508]: DEBUG oslo_concurrency.lockutils [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Releasing lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1933.151989] env[62508]: DEBUG nova.virt.hardware [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 
tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='afdbe50d1a6ae68441402054fafbf214',container_format='bare',created_at=2024-12-11T22:19:12Z,direct_url=,disk_format='vmdk',id=353a10e7-4163-47d9-8158-bb4b40bd7029,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-576952251-shelved',owner='86a46b5a43dd41e48816a8d86e3685b9',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2024-12-11T22:19:28Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1933.152269] env[62508]: DEBUG nova.virt.hardware [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1933.152423] env[62508]: DEBUG nova.virt.hardware [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1933.152601] env[62508]: DEBUG nova.virt.hardware [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1933.152746] env[62508]: DEBUG nova.virt.hardware [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1933.152891] env[62508]: DEBUG nova.virt.hardware [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1933.153131] env[62508]: DEBUG nova.virt.hardware [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1933.153309] env[62508]: DEBUG nova.virt.hardware [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1933.153476] env[62508]: DEBUG nova.virt.hardware [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 
tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1933.153637] env[62508]: DEBUG nova.virt.hardware [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1933.153804] env[62508]: DEBUG nova.virt.hardware [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1933.154706] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9b8770-5a8a-4607-a5ed-08a325cc023d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.162876] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a3c0ffe-53c7-4025-8398-9884c5b6ea0c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.176665] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:a7:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0df968ae-c1ef-4009-a0f4-6f2e799c2fda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '60434e32-b866-43d2-8a33-8925c4459e29', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1933.184158] env[62508]: DEBUG oslo.service.loopingcall [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1933.184400] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1933.184617] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa18fa1e-4025-47ad-86dc-d1d2334206d5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.204683] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1933.204683] env[62508]: value = "task-1776895" [ 1933.204683] env[62508]: _type = "Task" [ 1933.204683] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.213071] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776895, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.552496] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.196s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1933.555415] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.541s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1933.555688] env[62508]: DEBUG nova.objects.instance [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lazy-loading 'resources' on Instance uuid 5029bdd2-1f52-43ec-a978-b788b15a1204 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1933.595777] env[62508]: INFO nova.network.neutron [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Updating port 8fad22e1-6bfd-45d7-89b1-d953be11abbc with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1933.716584] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776895, 'name': CreateVM_Task, 'duration_secs': 0.380468} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.716759] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1933.717452] env[62508]: DEBUG oslo_concurrency.lockutils [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/353a10e7-4163-47d9-8158-bb4b40bd7029" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1933.717625] env[62508]: DEBUG oslo_concurrency.lockutils [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquired lock "[datastore1] devstack-image-cache_base/353a10e7-4163-47d9-8158-bb4b40bd7029" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1933.718075] env[62508]: DEBUG oslo_concurrency.lockutils [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/353a10e7-4163-47d9-8158-bb4b40bd7029" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1933.718350] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be15cc92-3bf9-4883-9fd9-f8733011f455 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.723387] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1933.723387] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521051cc-ceca-6cf3-2f49-8455fd60a04e" [ 1933.723387] env[62508]: _type = "Task" [ 1933.723387] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.735960] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]521051cc-ceca-6cf3-2f49-8455fd60a04e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.786496] env[62508]: DEBUG nova.objects.instance [None req-d18b3f21-ce30-4715-8e79-ae8bb88914e3 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lazy-loading 'flavor' on Instance uuid 716fc0ee-9aa7-4d2f-a5e0-024484bbe014 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1933.794955] env[62508]: DEBUG nova.compute.manager [req-688d9b43-aba2-44e3-a2fb-5b59dc8c2984 req-ee4fa029-b00b-4811-b8e5-4d836bd497c4 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Received event network-changed-60434e32-b866-43d2-8a33-8925c4459e29 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1933.795215] env[62508]: DEBUG nova.compute.manager [req-688d9b43-aba2-44e3-a2fb-5b59dc8c2984 req-ee4fa029-b00b-4811-b8e5-4d836bd497c4 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Refreshing instance network info cache due to event network-changed-60434e32-b866-43d2-8a33-8925c4459e29. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1933.795434] env[62508]: DEBUG oslo_concurrency.lockutils [req-688d9b43-aba2-44e3-a2fb-5b59dc8c2984 req-ee4fa029-b00b-4811-b8e5-4d836bd497c4 service nova] Acquiring lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1933.795577] env[62508]: DEBUG oslo_concurrency.lockutils [req-688d9b43-aba2-44e3-a2fb-5b59dc8c2984 req-ee4fa029-b00b-4811-b8e5-4d836bd497c4 service nova] Acquired lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1933.795737] env[62508]: DEBUG nova.network.neutron [req-688d9b43-aba2-44e3-a2fb-5b59dc8c2984 req-ee4fa029-b00b-4811-b8e5-4d836bd497c4 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Refreshing network info cache for port 60434e32-b866-43d2-8a33-8925c4459e29 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1934.147155] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c08d0e-2390-4c41-bfb4-040d2bc67a36 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.155196] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d42d6c7-2e5c-4272-9086-5654f1f4e093 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.186178] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd2bd3f4-7aaf-4af2-bba6-5c355cb08731 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.193999] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c40ff9-aad1-47ef-9e9c-01be806b05ec {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.207435] env[62508]: DEBUG nova.compute.provider_tree [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Updating inventory in ProviderTree for provider 
5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1934.234761] env[62508]: DEBUG oslo_concurrency.lockutils [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Releasing lock "[datastore1] devstack-image-cache_base/353a10e7-4163-47d9-8158-bb4b40bd7029" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1934.235043] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Processing image 353a10e7-4163-47d9-8158-bb4b40bd7029 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1934.235268] env[62508]: DEBUG oslo_concurrency.lockutils [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/353a10e7-4163-47d9-8158-bb4b40bd7029/353a10e7-4163-47d9-8158-bb4b40bd7029.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1934.235421] env[62508]: DEBUG oslo_concurrency.lockutils [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquired lock "[datastore1] devstack-image-cache_base/353a10e7-4163-47d9-8158-bb4b40bd7029/353a10e7-4163-47d9-8158-bb4b40bd7029.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1934.235631] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1934.235886] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-576ac631-026a-4464-b417-4e252645a6ba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.246095] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1934.246293] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1934.246993] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e765b73-ae38-4343-ae95-db08e08b5ae2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.252496] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1934.252496] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5230609f-2ee0-4834-fd9f-84c5487d0e92" [ 1934.252496] env[62508]: _type = "Task" [ 1934.252496] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.260709] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5230609f-2ee0-4834-fd9f-84c5487d0e92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.291537] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d18b3f21-ce30-4715-8e79-ae8bb88914e3 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "refresh_cache-716fc0ee-9aa7-4d2f-a5e0-024484bbe014" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1934.291721] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d18b3f21-ce30-4715-8e79-ae8bb88914e3 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "refresh_cache-716fc0ee-9aa7-4d2f-a5e0-024484bbe014" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1934.291873] env[62508]: DEBUG nova.network.neutron [None req-d18b3f21-ce30-4715-8e79-ae8bb88914e3 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1934.292214] env[62508]: DEBUG nova.objects.instance [None req-d18b3f21-ce30-4715-8e79-ae8bb88914e3 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lazy-loading 'info_cache' on Instance uuid 716fc0ee-9aa7-4d2f-a5e0-024484bbe014 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1934.549910] env[62508]: DEBUG nova.network.neutron [req-688d9b43-aba2-44e3-a2fb-5b59dc8c2984 req-ee4fa029-b00b-4811-b8e5-4d836bd497c4 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Updated VIF entry in instance network info cache for port 60434e32-b866-43d2-8a33-8925c4459e29. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1934.550305] env[62508]: DEBUG nova.network.neutron [req-688d9b43-aba2-44e3-a2fb-5b59dc8c2984 req-ee4fa029-b00b-4811-b8e5-4d836bd497c4 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Updating instance_info_cache with network_info: [{"id": "60434e32-b866-43d2-8a33-8925c4459e29", "address": "fa:16:3e:3f:a7:2d", "network": {"id": "7fdcf35b-d562-4926-a8b1-15143df837c1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-791265259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86a46b5a43dd41e48816a8d86e3685b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60434e32-b8", "ovs_interfaceid": "60434e32-b866-43d2-8a33-8925c4459e29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1934.738580] env[62508]: DEBUG nova.scheduler.client.report [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 189 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1934.738897] env[62508]: DEBUG nova.compute.provider_tree [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 189 to 190 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1934.739041] env[62508]: DEBUG nova.compute.provider_tree [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1934.764863] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Preparing fetch location {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1934.765051] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Fetch image to [datastore1] OSTACK_IMG_65f27cb0-804e-4498-925a-bcdbd6634b3f/OSTACK_IMG_65f27cb0-804e-4498-925a-bcdbd6634b3f.vmdk {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1934.765222] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Downloading stream optimized image 353a10e7-4163-47d9-8158-bb4b40bd7029 to [datastore1] OSTACK_IMG_65f27cb0-804e-4498-925a-bcdbd6634b3f/OSTACK_IMG_65f27cb0-804e-4498-925a-bcdbd6634b3f.vmdk on the data store datastore1 as vApp {{(pid=62508) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1934.765464] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Downloading image file data 353a10e7-4163-47d9-8158-bb4b40bd7029 to the ESX as VM named 'OSTACK_IMG_65f27cb0-804e-4498-925a-bcdbd6634b3f' {{(pid=62508) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1934.809404] env[62508]: DEBUG nova.objects.base [None req-d18b3f21-ce30-4715-8e79-ae8bb88914e3 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Object Instance<716fc0ee-9aa7-4d2f-a5e0-024484bbe014> lazy-loaded attributes: flavor,info_cache {{(pid=62508) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1934.842338] env[62508]: DEBUG oslo_vmware.rw_handles [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1934.842338] env[62508]: value = "resgroup-9" [ 1934.842338] env[62508]: _type = "ResourcePool" [ 1934.842338] env[62508]: }. 
{{(pid=62508) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1934.842615] env[62508]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-a3ac7a22-a605-48ae-8c37-235d292c1d74 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.865489] env[62508]: DEBUG oslo_vmware.rw_handles [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lease: (returnval){ [ 1934.865489] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528af84a-af96-21bc-9849-fdf5c6d24fd3" [ 1934.865489] env[62508]: _type = "HttpNfcLease" [ 1934.865489] env[62508]: } obtained for vApp import into resource pool (val){ [ 1934.865489] env[62508]: value = "resgroup-9" [ 1934.865489] env[62508]: _type = "ResourcePool" [ 1934.865489] env[62508]: }. {{(pid=62508) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1934.865811] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the lease: (returnval){ [ 1934.865811] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528af84a-af96-21bc-9849-fdf5c6d24fd3" [ 1934.865811] env[62508]: _type = "HttpNfcLease" [ 1934.865811] env[62508]: } to be ready. {{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1934.873598] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1934.873598] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528af84a-af96-21bc-9849-fdf5c6d24fd3" [ 1934.873598] env[62508]: _type = "HttpNfcLease" [ 1934.873598] env[62508]: } is initializing. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1935.053613] env[62508]: DEBUG oslo_concurrency.lockutils [req-688d9b43-aba2-44e3-a2fb-5b59dc8c2984 req-ee4fa029-b00b-4811-b8e5-4d836bd497c4 service nova] Releasing lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1935.243598] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.688s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1935.262793] env[62508]: INFO nova.scheduler.client.report [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleted allocations for instance 5029bdd2-1f52-43ec-a978-b788b15a1204 [ 1935.375308] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1935.375308] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528af84a-af96-21bc-9849-fdf5c6d24fd3" [ 1935.375308] env[62508]: _type = "HttpNfcLease" [ 1935.375308] env[62508]: } is initializing. 
{{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1935.705517] env[62508]: DEBUG nova.network.neutron [None req-d18b3f21-ce30-4715-8e79-ae8bb88914e3 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Updating instance_info_cache with network_info: [{"id": "0b96af68-0b68-4eac-81c7-2d671bfe54fa", "address": "fa:16:3e:c6:7f:9f", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b96af68-0b", "ovs_interfaceid": "0b96af68-0b68-4eac-81c7-2d671bfe54fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1935.770833] env[62508]: DEBUG oslo_concurrency.lockutils [None req-bfb2339f-f414-4575-a4f3-12759121439e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "5029bdd2-1f52-43ec-a978-b788b15a1204" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.718s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1935.875697] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1935.875697] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528af84a-af96-21bc-9849-fdf5c6d24fd3" [ 1935.875697] env[62508]: _type = "HttpNfcLease" [ 1935.875697] env[62508]: } is ready. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1935.876013] env[62508]: DEBUG oslo_vmware.rw_handles [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1935.876013] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]528af84a-af96-21bc-9849-fdf5c6d24fd3" [ 1935.876013] env[62508]: _type = "HttpNfcLease" [ 1935.876013] env[62508]: }. 
{{(pid=62508) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1935.876761] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00d9a4ee-6547-48b8-b4f3-ad2312d30318 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.883976] env[62508]: DEBUG oslo_vmware.rw_handles [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521972a1-30ce-f558-780d-2a5cfd163574/disk-0.vmdk from lease info. {{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1935.884168] env[62508]: DEBUG oslo_vmware.rw_handles [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521972a1-30ce-f558-780d-2a5cfd163574/disk-0.vmdk. {{(pid=62508) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1935.947376] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a07d3590-44d6-46db-81ab-97dab151240c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.964619] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "09918540-a9ce-4c76-84b9-fbe452d5abf3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1935.964901] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "09918540-a9ce-4c76-84b9-fbe452d5abf3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1935.965124] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "09918540-a9ce-4c76-84b9-fbe452d5abf3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1935.965323] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "09918540-a9ce-4c76-84b9-fbe452d5abf3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1935.965498] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 
tempest-ServersTestJSON-1349594885-project-member] Lock "09918540-a9ce-4c76-84b9-fbe452d5abf3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1935.967722] env[62508]: INFO nova.compute.manager [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Terminating instance [ 1935.969516] env[62508]: DEBUG nova.compute.manager [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1935.969718] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1935.970661] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3deba349-5383-4a44-9d55-bc1db7bfb713 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.979150] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1935.979404] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b9df1d5d-24d8-4ddc-b18d-b9154df9a830 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.986569] env[62508]: DEBUG oslo_vmware.api [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1935.986569] env[62508]: value = "task-1776897" [ 1935.986569] env[62508]: _type = "Task" [ 1935.986569] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.995552] env[62508]: DEBUG oslo_vmware.api [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776897, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.211675] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d18b3f21-ce30-4715-8e79-ae8bb88914e3 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "refresh_cache-716fc0ee-9aa7-4d2f-a5e0-024484bbe014" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1936.497957] env[62508]: DEBUG oslo_vmware.api [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776897, 'name': PowerOffVM_Task, 'duration_secs': 0.210291} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.498255] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1936.498427] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1936.498676] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4627ea01-5688-4e39-b271-45244437c549 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.716697] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-d18b3f21-ce30-4715-8e79-ae8bb88914e3 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1936.716908] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-538ee8ac-310d-4849-bd7e-7e036ddc54d9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.726428] env[62508]: DEBUG oslo_vmware.api [None req-d18b3f21-ce30-4715-8e79-ae8bb88914e3 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1936.726428] env[62508]: value = "task-1776899" [ 1936.726428] env[62508]: _type = "Task" [ 1936.726428] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.735259] env[62508]: DEBUG oslo_vmware.api [None req-d18b3f21-ce30-4715-8e79-ae8bb88914e3 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776899, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.237776] env[62508]: DEBUG oslo_vmware.api [None req-d18b3f21-ce30-4715-8e79-ae8bb88914e3 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776899, 'name': PowerOnVM_Task, 'duration_secs': 0.437978} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1937.238085] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-d18b3f21-ce30-4715-8e79-ae8bb88914e3 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1937.238263] env[62508]: DEBUG nova.compute.manager [None req-d18b3f21-ce30-4715-8e79-ae8bb88914e3 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1937.239309] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e078f4-b69f-462a-a4ca-1f7a8ebb8d66 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.884022] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111768fb-a098-455b-9108-475b2bc0be5b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.891788] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6b89e38e-0c21-4a57-b826-24b0b25515b4 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Suspending the VM {{(pid=62508) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1938.891915] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-cec273ce-c16c-409d-af77-ff5fd081d268 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.899349] env[62508]: DEBUG oslo_vmware.api [None req-6b89e38e-0c21-4a57-b826-24b0b25515b4 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1938.899349] env[62508]: value = "task-1776900" [ 1938.899349] env[62508]: _type = "Task" [ 1938.899349] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.910465] env[62508]: DEBUG oslo_vmware.api [None req-6b89e38e-0c21-4a57-b826-24b0b25515b4 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776900, 'name': SuspendVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.409208] env[62508]: DEBUG oslo_vmware.api [None req-6b89e38e-0c21-4a57-b826-24b0b25515b4 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776900, 'name': SuspendVM_Task} progress is 70%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.909835] env[62508]: DEBUG oslo_vmware.api [None req-6b89e38e-0c21-4a57-b826-24b0b25515b4 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776900, 'name': SuspendVM_Task, 'duration_secs': 0.658765} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1939.910286] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6b89e38e-0c21-4a57-b826-24b0b25515b4 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Suspended the VM {{(pid=62508) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1939.910286] env[62508]: DEBUG nova.compute.manager [None req-6b89e38e-0c21-4a57-b826-24b0b25515b4 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1939.911035] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90472030-be51-416a-a68d-9d7ce7abb5c1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.995618] env[62508]: DEBUG oslo_vmware.rw_handles [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Completed reading data from the image iterator. {{(pid=62508) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1940.996090] env[62508]: DEBUG oslo_vmware.rw_handles [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521972a1-30ce-f558-780d-2a5cfd163574/disk-0.vmdk. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1940.996922] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8524599e-8dd6-495a-8b27-71af06a46d44 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.004382] env[62508]: DEBUG oslo_vmware.rw_handles [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521972a1-30ce-f558-780d-2a5cfd163574/disk-0.vmdk is in state: ready. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1941.004597] env[62508]: DEBUG oslo_vmware.rw_handles [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521972a1-30ce-f558-780d-2a5cfd163574/disk-0.vmdk. 
{{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1941.004806] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-f4b44ca0-0280-4aa9-9857-099c84eb276f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.251671] env[62508]: DEBUG oslo_vmware.rw_handles [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521972a1-30ce-f558-780d-2a5cfd163574/disk-0.vmdk. {{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1941.251892] env[62508]: INFO nova.virt.vmwareapi.images [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Downloaded image file data 353a10e7-4163-47d9-8158-bb4b40bd7029 [ 1941.252824] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cace891-9648-4c0c-8ce7-6a2b4a73acd7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.268939] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b584a8f4-2d8c-4f9e-a96d-bc84267476d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.273089] env[62508]: INFO nova.compute.manager [None req-597cce2b-53fb-4c24-bd0d-8a1f3d11dfb7 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Resuming [ 1941.273714] env[62508]: DEBUG nova.objects.instance [None req-597cce2b-53fb-4c24-bd0d-8a1f3d11dfb7 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lazy-loading 'flavor' on Instance uuid 716fc0ee-9aa7-4d2f-a5e0-024484bbe014 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1941.304770] env[62508]: INFO nova.virt.vmwareapi.images [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] The imported VM was unregistered [ 1941.307450] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Caching image {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1941.307729] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Creating directory with path [datastore1] devstack-image-cache_base/353a10e7-4163-47d9-8158-bb4b40bd7029 {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1941.308091] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4400b65c-4ed5-49ac-b453-676599b3e55f {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.342549] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Created directory with path [datastore1] devstack-image-cache_base/353a10e7-4163-47d9-8158-bb4b40bd7029 {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1941.342844] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_65f27cb0-804e-4498-925a-bcdbd6634b3f/OSTACK_IMG_65f27cb0-804e-4498-925a-bcdbd6634b3f.vmdk to [datastore1] devstack-image-cache_base/353a10e7-4163-47d9-8158-bb4b40bd7029/353a10e7-4163-47d9-8158-bb4b40bd7029.vmdk. {{(pid=62508) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1941.343132] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-309ba9e0-0b07-4ad7-9ace-eb7b5c8096d7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.350589] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1941.350589] env[62508]: value = "task-1776902" [ 1941.350589] env[62508]: _type = "Task" [ 1941.350589] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.359873] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776902, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.861729] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776902, 'name': MoveVirtualDisk_Task} progress is 12%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.281872] env[62508]: DEBUG oslo_concurrency.lockutils [None req-597cce2b-53fb-4c24-bd0d-8a1f3d11dfb7 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "refresh_cache-716fc0ee-9aa7-4d2f-a5e0-024484bbe014" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1942.282256] env[62508]: DEBUG oslo_concurrency.lockutils [None req-597cce2b-53fb-4c24-bd0d-8a1f3d11dfb7 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquired lock "refresh_cache-716fc0ee-9aa7-4d2f-a5e0-024484bbe014" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1942.282386] env[62508]: DEBUG nova.network.neutron [None req-597cce2b-53fb-4c24-bd0d-8a1f3d11dfb7 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1942.361883] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776902, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.862541] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776902, 'name': MoveVirtualDisk_Task} progress is 35%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.034155] env[62508]: DEBUG nova.network.neutron [None req-597cce2b-53fb-4c24-bd0d-8a1f3d11dfb7 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Updating instance_info_cache with network_info: [{"id": "0b96af68-0b68-4eac-81c7-2d671bfe54fa", "address": "fa:16:3e:c6:7f:9f", "network": {"id": "6916575c-317f-4c93-95a6-012e37107f15", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-307053976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce0dd059301e41abb3758625d38e435e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b96af68-0b", "ovs_interfaceid": "0b96af68-0b68-4eac-81c7-2d671bfe54fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1943.362954] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776902, 'name': MoveVirtualDisk_Task} progress is 46%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.538753] env[62508]: DEBUG oslo_concurrency.lockutils [None req-597cce2b-53fb-4c24-bd0d-8a1f3d11dfb7 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Releasing lock "refresh_cache-716fc0ee-9aa7-4d2f-a5e0-024484bbe014" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1943.540066] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9b39d4-184d-4876-92f7-e5e92e7953b3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.547119] env[62508]: DEBUG nova.compute.manager [req-0655db51-736a-4223-8100-61c1d73c3b4d req-1506f495-1bef-4959-b679-39740c81343e service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Received event network-vif-plugged-8fad22e1-6bfd-45d7-89b1-d953be11abbc {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1943.547321] env[62508]: DEBUG oslo_concurrency.lockutils [req-0655db51-736a-4223-8100-61c1d73c3b4d req-1506f495-1bef-4959-b679-39740c81343e service nova] Acquiring lock "d58f5593-aafc-43e0-a040-96af10659b70-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1943.547453] env[62508]: DEBUG oslo_concurrency.lockutils [req-0655db51-736a-4223-8100-61c1d73c3b4d req-1506f495-1bef-4959-b679-39740c81343e service nova] Lock "d58f5593-aafc-43e0-a040-96af10659b70-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1943.547567] env[62508]: DEBUG oslo_concurrency.lockutils [req-0655db51-736a-4223-8100-61c1d73c3b4d req-1506f495-1bef-4959-b679-39740c81343e service nova] Lock "d58f5593-aafc-43e0-a040-96af10659b70-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1943.547718] env[62508]: DEBUG nova.compute.manager [req-0655db51-736a-4223-8100-61c1d73c3b4d req-1506f495-1bef-4959-b679-39740c81343e service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] No waiting events found dispatching network-vif-plugged-8fad22e1-6bfd-45d7-89b1-d953be11abbc {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1943.547887] env[62508]: WARNING nova.compute.manager [req-0655db51-736a-4223-8100-61c1d73c3b4d req-1506f495-1bef-4959-b679-39740c81343e service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Received unexpected event network-vif-plugged-8fad22e1-6bfd-45d7-89b1-d953be11abbc for instance with vm_state shelved_offloaded and task_state spawning. 
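Editor's note: the entries above and below (SuspendVM_Task, PowerOnVM_Task, MoveVirtualDisk_Task, and the repeated `wait_for_task` / `_poll_task` progress lines) all come from the oslo.vmware task-polling helpers that this driver uses. The following is a minimal illustrative sketch of that pattern, not code taken from this log: the vCenter host, credentials, and the `vm-1234` managed object ID are placeholders, and constructing the session with default arguments attempts a real login.

    # Hedged sketch of the oslo.vmware task-polling pattern seen in these entries.
    # All connection details and the moref value below are assumed placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Opens a vSphere API session (logs in immediately with the defaults).
    session = vmware_api.VMwareAPISession(
        'vc.example.com',                  # placeholder vCenter host
        'administrator@vsphere.local',     # placeholder username
        'secret',                          # placeholder password
        10,                                # api_retry_count
        0.5)                               # task_poll_interval in seconds

    # Build a ManagedObjectReference for the target VM (placeholder moref id).
    vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')

    # Start a vSphere task and block until the poller reports completion;
    # this is what produces the "Waiting for the task" and progress lines above.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)
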
[ 1943.550330] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-597cce2b-53fb-4c24-bd0d-8a1f3d11dfb7 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Resuming the VM {{(pid=62508) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1943.550639] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3728a013-8df8-40a4-94e8-6f7397f8c2b7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.559868] env[62508]: DEBUG oslo_vmware.api [None req-597cce2b-53fb-4c24-bd0d-8a1f3d11dfb7 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1943.559868] env[62508]: value = "task-1776903" [ 1943.559868] env[62508]: _type = "Task" [ 1943.559868] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.570788] env[62508]: DEBUG oslo_vmware.api [None req-597cce2b-53fb-4c24-bd0d-8a1f3d11dfb7 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776903, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.636122] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1943.636446] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1943.636741] env[62508]: DEBUG nova.network.neutron [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1943.718481] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1943.718744] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1943.718814] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleting the datastore file [datastore1] 
09918540-a9ce-4c76-84b9-fbe452d5abf3 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1943.719062] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-69572645-cec0-4592-be78-eeafc59ff07d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.728217] env[62508]: DEBUG oslo_vmware.api [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1943.728217] env[62508]: value = "task-1776904" [ 1943.728217] env[62508]: _type = "Task" [ 1943.728217] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.737849] env[62508]: DEBUG oslo_vmware.api [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776904, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.866393] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776902, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.076386] env[62508]: DEBUG oslo_vmware.api [None req-597cce2b-53fb-4c24-bd0d-8a1f3d11dfb7 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776903, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.240741] env[62508]: DEBUG oslo_vmware.api [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776904, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.367319] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776902, 'name': MoveVirtualDisk_Task} progress is 74%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.553443] env[62508]: DEBUG nova.network.neutron [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Updating instance_info_cache with network_info: [{"id": "8fad22e1-6bfd-45d7-89b1-d953be11abbc", "address": "fa:16:3e:6e:f6:0b", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fad22e1-6b", "ovs_interfaceid": "8fad22e1-6bfd-45d7-89b1-d953be11abbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1944.572874] env[62508]: DEBUG oslo_vmware.api [None req-597cce2b-53fb-4c24-bd0d-8a1f3d11dfb7 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776903, 'name': PowerOnVM_Task, 'duration_secs': 0.599204} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.573173] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-597cce2b-53fb-4c24-bd0d-8a1f3d11dfb7 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Resumed the VM {{(pid=62508) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1944.573358] env[62508]: DEBUG nova.compute.manager [None req-597cce2b-53fb-4c24-bd0d-8a1f3d11dfb7 tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1944.574280] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5fe372-e32d-476d-8234-6869c5c91205 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.740635] env[62508]: DEBUG oslo_vmware.api [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776904, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.865428] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776902, 'name': MoveVirtualDisk_Task} progress is 91%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.056189] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1945.083814] env[62508]: DEBUG nova.virt.hardware [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='93ba9c6825e73562261f388571c1d91b',container_format='bare',created_at=2024-12-11T22:19:17Z,direct_url=,disk_format='vmdk',id=9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-406990823-shelved',owner='ce113e91e2b74136a8050ed3acf3557c',properties=ImageMetaProps,protected=,size=31667712,status='active',tags=,updated_at=2024-12-11T22:19:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1945.084085] env[62508]: DEBUG nova.virt.hardware [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1945.084250] env[62508]: DEBUG nova.virt.hardware [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1945.084469] env[62508]: DEBUG nova.virt.hardware [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1945.084663] env[62508]: DEBUG nova.virt.hardware [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1945.084821] env[62508]: DEBUG nova.virt.hardware [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1945.085043] env[62508]: DEBUG nova.virt.hardware [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1945.085210] env[62508]: DEBUG nova.virt.hardware [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1945.085380] env[62508]: DEBUG nova.virt.hardware [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1945.085548] env[62508]: DEBUG nova.virt.hardware [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1945.085726] env[62508]: DEBUG nova.virt.hardware [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1945.088499] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d2f3d89-3356-4965-83d0-9a5b6cdc2630 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.098020] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba61f6eb-94df-49c1-bfad-77a52a021ff5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.113244] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:f6:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35342bcb-8b06-472e-b3c0-43fd3d6c4b30', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8fad22e1-6bfd-45d7-89b1-d953be11abbc', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1945.120814] env[62508]: DEBUG oslo.service.loopingcall [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1945.121610] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1945.121839] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-68b55ae8-772e-44a4-97e1-1d09ea5d89df {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.142908] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1945.142908] env[62508]: value = "task-1776905" [ 1945.142908] env[62508]: _type = "Task" [ 1945.142908] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.151216] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776905, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.241031] env[62508]: DEBUG oslo_vmware.api [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776904, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.367930] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776902, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.622889] env[62508]: DEBUG nova.compute.manager [req-b3da2546-f956-4931-9e5e-c7a6881a6dcf req-1d2a16ec-9670-4ed5-a248-e92cf7aa4145 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Received event network-changed-8fad22e1-6bfd-45d7-89b1-d953be11abbc {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1945.623139] env[62508]: DEBUG nova.compute.manager [req-b3da2546-f956-4931-9e5e-c7a6881a6dcf req-1d2a16ec-9670-4ed5-a248-e92cf7aa4145 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Refreshing instance network info cache due to event network-changed-8fad22e1-6bfd-45d7-89b1-d953be11abbc. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1945.623358] env[62508]: DEBUG oslo_concurrency.lockutils [req-b3da2546-f956-4931-9e5e-c7a6881a6dcf req-1d2a16ec-9670-4ed5-a248-e92cf7aa4145 service nova] Acquiring lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1945.623560] env[62508]: DEBUG oslo_concurrency.lockutils [req-b3da2546-f956-4931-9e5e-c7a6881a6dcf req-1d2a16ec-9670-4ed5-a248-e92cf7aa4145 service nova] Acquired lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1945.623733] env[62508]: DEBUG nova.network.neutron [req-b3da2546-f956-4931-9e5e-c7a6881a6dcf req-1d2a16ec-9670-4ed5-a248-e92cf7aa4145 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Refreshing network info cache for port 8fad22e1-6bfd-45d7-89b1-d953be11abbc {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1945.653849] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776905, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.740886] env[62508]: DEBUG oslo_vmware.api [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776904, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.719666} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.741175] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1945.741397] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1945.741629] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1945.741810] env[62508]: INFO nova.compute.manager [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Took 9.77 seconds to destroy the instance on the hypervisor. [ 1945.742069] env[62508]: DEBUG oslo.service.loopingcall [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1945.742269] env[62508]: DEBUG nova.compute.manager [-] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1945.742363] env[62508]: DEBUG nova.network.neutron [-] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1945.867293] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776902, 'name': MoveVirtualDisk_Task, 'duration_secs': 4.073789} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.867552] env[62508]: INFO nova.virt.vmwareapi.ds_util [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_65f27cb0-804e-4498-925a-bcdbd6634b3f/OSTACK_IMG_65f27cb0-804e-4498-925a-bcdbd6634b3f.vmdk to [datastore1] devstack-image-cache_base/353a10e7-4163-47d9-8158-bb4b40bd7029/353a10e7-4163-47d9-8158-bb4b40bd7029.vmdk. [ 1945.867749] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Cleaning up location [datastore1] OSTACK_IMG_65f27cb0-804e-4498-925a-bcdbd6634b3f {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1945.867916] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_65f27cb0-804e-4498-925a-bcdbd6634b3f {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1945.868200] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-03c15b98-33d8-497c-8905-cc1cf804ef2b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.874977] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1945.874977] env[62508]: value = "task-1776906" [ 1945.874977] env[62508]: _type = "Task" [ 1945.874977] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.882904] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776906, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.154923] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776905, 'name': CreateVM_Task, 'duration_secs': 0.601404} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.157073] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1946.157724] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1946.157889] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1946.158271] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1946.158793] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12a261fa-9122-467a-b8d8-99f23c8cbea8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.165477] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1946.165477] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b65968-2cbf-d7bf-b8ca-2680e3ff8cc5" [ 1946.165477] env[62508]: _type = "Task" [ 1946.165477] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.174899] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b65968-2cbf-d7bf-b8ca-2680e3ff8cc5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.184608] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "716fc0ee-9aa7-4d2f-a5e0-024484bbe014" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.184917] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "716fc0ee-9aa7-4d2f-a5e0-024484bbe014" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1946.185155] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "716fc0ee-9aa7-4d2f-a5e0-024484bbe014-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.185353] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "716fc0ee-9aa7-4d2f-a5e0-024484bbe014-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1946.185577] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "716fc0ee-9aa7-4d2f-a5e0-024484bbe014-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1946.188049] env[62508]: INFO nova.compute.manager [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Terminating instance [ 1946.190535] env[62508]: DEBUG nova.compute.manager [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1946.190760] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1946.191729] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02afc850-10c7-420e-94fb-3b6445d86710 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.200741] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1946.200994] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-253bcacf-39c9-40d1-b516-2f055ab65ba9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.211454] env[62508]: DEBUG oslo_vmware.api [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1946.211454] env[62508]: value = "task-1776907" [ 1946.211454] env[62508]: _type = "Task" [ 1946.211454] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.219824] env[62508]: DEBUG oslo_vmware.api [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776907, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.358248] env[62508]: DEBUG nova.network.neutron [req-b3da2546-f956-4931-9e5e-c7a6881a6dcf req-1d2a16ec-9670-4ed5-a248-e92cf7aa4145 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Updated VIF entry in instance network info cache for port 8fad22e1-6bfd-45d7-89b1-d953be11abbc. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1946.358836] env[62508]: DEBUG nova.network.neutron [req-b3da2546-f956-4931-9e5e-c7a6881a6dcf req-1d2a16ec-9670-4ed5-a248-e92cf7aa4145 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Updating instance_info_cache with network_info: [{"id": "8fad22e1-6bfd-45d7-89b1-d953be11abbc", "address": "fa:16:3e:6e:f6:0b", "network": {"id": "63896a73-637d-496f-b12b-d5e5818202d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1441584005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce113e91e2b74136a8050ed3acf3557c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fad22e1-6b", "ovs_interfaceid": "8fad22e1-6bfd-45d7-89b1-d953be11abbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1946.389822] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776906, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.060481} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.390177] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1946.390396] env[62508]: DEBUG oslo_concurrency.lockutils [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Releasing lock "[datastore1] devstack-image-cache_base/353a10e7-4163-47d9-8158-bb4b40bd7029/353a10e7-4163-47d9-8158-bb4b40bd7029.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1946.390718] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/353a10e7-4163-47d9-8158-bb4b40bd7029/353a10e7-4163-47d9-8158-bb4b40bd7029.vmdk to [datastore1] aea987d3-1daf-45f5-84c3-893eb6bdb57a/aea987d3-1daf-45f5-84c3-893eb6bdb57a.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1946.391054] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da227a5f-9c1f-4b58-b3e4-10c5c9f13ff6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.400677] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1946.400677] env[62508]: value = "task-1776908" [ 1946.400677] env[62508]: _type = "Task" [ 1946.400677] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.412589] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776908, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.459770] env[62508]: DEBUG nova.network.neutron [-] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1946.677781] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1946.678131] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Processing image 9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1946.678527] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5/9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1946.678635] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5/9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1946.678876] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1946.679189] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43b5db27-dc37-4c0f-ab0f-b3ac54912a25 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.713821] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1946.714016] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1946.718294] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6757976-5a76-4da5-90a8-fc0e2fe236ba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.725503] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1946.725503] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52395f2f-fd88-d8b8-5501-59f47387af72" [ 1946.725503] env[62508]: _type = "Task" [ 1946.725503] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.728648] env[62508]: DEBUG oslo_vmware.api [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776907, 'name': PowerOffVM_Task, 'duration_secs': 0.194981} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.732235] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1946.732438] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1946.733072] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5bc7742c-3965-4680-90b4-22912d837d22 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.740804] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52395f2f-fd88-d8b8-5501-59f47387af72, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.862803] env[62508]: DEBUG oslo_concurrency.lockutils [req-b3da2546-f956-4931-9e5e-c7a6881a6dcf req-1d2a16ec-9670-4ed5-a248-e92cf7aa4145 service nova] Releasing lock "refresh_cache-d58f5593-aafc-43e0-a040-96af10659b70" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1946.888451] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1946.888775] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1946.888993] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Deleting the datastore file [datastore1] 716fc0ee-9aa7-4d2f-a5e0-024484bbe014 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1946.889289] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-25b6738b-79bf-44ed-987a-03f714c4c866 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.898037] env[62508]: DEBUG oslo_vmware.api [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for the task: (returnval){ [ 1946.898037] env[62508]: value = "task-1776910" [ 1946.898037] env[62508]: _type = "Task" [ 1946.898037] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.904425] env[62508]: DEBUG oslo_vmware.api [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776910, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.912064] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776908, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.962934] env[62508]: INFO nova.compute.manager [-] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Took 1.22 seconds to deallocate network for instance. 
[ 1947.239625] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Preparing fetch location {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1947.239966] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Fetch image to [datastore1] OSTACK_IMG_2852f503-a9fc-4ecb-9c28-df953268697f/OSTACK_IMG_2852f503-a9fc-4ecb-9c28-df953268697f.vmdk {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1947.240105] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Downloading stream optimized image 9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5 to [datastore1] OSTACK_IMG_2852f503-a9fc-4ecb-9c28-df953268697f/OSTACK_IMG_2852f503-a9fc-4ecb-9c28-df953268697f.vmdk on the data store datastore1 as vApp {{(pid=62508) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1947.240273] env[62508]: DEBUG nova.virt.vmwareapi.images [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Downloading image file data 9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5 to the ESX as VM named 'OSTACK_IMG_2852f503-a9fc-4ecb-9c28-df953268697f' {{(pid=62508) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1947.319273] env[62508]: DEBUG oslo_vmware.rw_handles [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1947.319273] env[62508]: value = "resgroup-9" [ 1947.319273] env[62508]: _type = "ResourcePool" [ 1947.319273] env[62508]: }. {{(pid=62508) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1947.319573] env[62508]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-c4bb12db-98a0-4b2b-a326-d70cda756b27 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.344972] env[62508]: DEBUG oslo_vmware.rw_handles [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lease: (returnval){ [ 1947.344972] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529c9bed-b855-6faa-ce18-48ef13806252" [ 1947.344972] env[62508]: _type = "HttpNfcLease" [ 1947.344972] env[62508]: } obtained for vApp import into resource pool (val){ [ 1947.344972] env[62508]: value = "resgroup-9" [ 1947.344972] env[62508]: _type = "ResourcePool" [ 1947.344972] env[62508]: }. 
{{(pid=62508) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1947.345417] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the lease: (returnval){ [ 1947.345417] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529c9bed-b855-6faa-ce18-48ef13806252" [ 1947.345417] env[62508]: _type = "HttpNfcLease" [ 1947.345417] env[62508]: } to be ready. {{(pid=62508) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1947.352352] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1947.352352] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529c9bed-b855-6faa-ce18-48ef13806252" [ 1947.352352] env[62508]: _type = "HttpNfcLease" [ 1947.352352] env[62508]: } is initializing. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1947.406829] env[62508]: DEBUG oslo_vmware.api [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776910, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.415874] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776908, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.470244] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1947.470530] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1947.470765] env[62508]: DEBUG nova.objects.instance [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lazy-loading 'resources' on Instance uuid 09918540-a9ce-4c76-84b9-fbe452d5abf3 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1947.649211] env[62508]: DEBUG nova.compute.manager [req-ec5c8ec6-8353-4329-a55c-890933db617f req-ac96c776-95a0-4ac7-b676-e21430666195 service nova] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Received event network-vif-deleted-e356c32d-0aab-4beb-8c5c-58de8aaf9cc4 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1947.854157] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1947.854157] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529c9bed-b855-6faa-ce18-48ef13806252" [ 1947.854157] env[62508]: _type = 
"HttpNfcLease" [ 1947.854157] env[62508]: } is initializing. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1947.907507] env[62508]: DEBUG oslo_vmware.api [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776910, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.917195] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776908, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.056295] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aef7cdd-c8ac-46ff-9202-731a1d0a1fd4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.067685] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c34d4a-e6d0-4ee8-8fb3-45131664e1d4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.099702] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31972c9-8498-4fbf-a5c5-ce1eb156d9f5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.108806] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf0b983-3264-4fc6-aefa-fb5ada3ba6b5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.123571] env[62508]: DEBUG nova.compute.provider_tree [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1948.354949] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1948.354949] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529c9bed-b855-6faa-ce18-48ef13806252" [ 1948.354949] env[62508]: _type = "HttpNfcLease" [ 1948.354949] env[62508]: } is initializing. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1948.407383] env[62508]: DEBUG oslo_vmware.api [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776910, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.416302] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776908, 'name': CopyVirtualDisk_Task} progress is 83%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.626895] env[62508]: DEBUG nova.scheduler.client.report [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1948.855737] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1948.855737] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529c9bed-b855-6faa-ce18-48ef13806252" [ 1948.855737] env[62508]: _type = "HttpNfcLease" [ 1948.855737] env[62508]: } is initializing. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1948.907671] env[62508]: DEBUG oslo_vmware.api [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Task: {'id': task-1776910, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.928163} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.909049] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1948.909049] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1948.909049] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1948.909049] env[62508]: INFO nova.compute.manager [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Took 2.72 seconds to destroy the instance on the hypervisor. [ 1948.909049] env[62508]: DEBUG oslo.service.loopingcall [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1948.911902] env[62508]: DEBUG nova.compute.manager [-] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1948.912009] env[62508]: DEBUG nova.network.neutron [-] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1948.919939] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776908, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.42739} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.920188] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/353a10e7-4163-47d9-8158-bb4b40bd7029/353a10e7-4163-47d9-8158-bb4b40bd7029.vmdk to [datastore1] aea987d3-1daf-45f5-84c3-893eb6bdb57a/aea987d3-1daf-45f5-84c3-893eb6bdb57a.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1948.920922] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c336c18-77d2-456f-99c1-17f0642464de {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.942523] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] aea987d3-1daf-45f5-84c3-893eb6bdb57a/aea987d3-1daf-45f5-84c3-893eb6bdb57a.vmdk or device None with type streamOptimized {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1948.942767] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d422e64-a697-4922-9f54-c1a6f1cfe5ed {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.963222] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1948.963222] env[62508]: value = "task-1776912" [ 1948.963222] env[62508]: _type = "Task" [ 1948.963222] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.971141] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776912, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.131940] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.661s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.153336] env[62508]: INFO nova.scheduler.client.report [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleted allocations for instance 09918540-a9ce-4c76-84b9-fbe452d5abf3 [ 1949.362581] env[62508]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1949.362581] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529c9bed-b855-6faa-ce18-48ef13806252" [ 1949.362581] env[62508]: _type = "HttpNfcLease" [ 1949.362581] env[62508]: } is ready. {{(pid=62508) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1949.364233] env[62508]: DEBUG oslo_vmware.rw_handles [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1949.364233] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]529c9bed-b855-6faa-ce18-48ef13806252" [ 1949.364233] env[62508]: _type = "HttpNfcLease" [ 1949.364233] env[62508]: }. {{(pid=62508) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1949.366470] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a750e118-c519-4cac-beb7-1ad214d0816d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.377192] env[62508]: DEBUG oslo_vmware.rw_handles [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5230adac-047e-61ac-10e4-b9f41fac362b/disk-0.vmdk from lease info. {{(pid=62508) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1949.377458] env[62508]: DEBUG oslo_vmware.rw_handles [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Creating HTTP connection to write to file with size = 31667712 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5230adac-047e-61ac-10e4-b9f41fac362b/disk-0.vmdk. {{(pid=62508) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1949.442744] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d00ee84d-79cf-4ba8-b402-5a64d5fb9b39 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.473302] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776912, 'name': ReconfigVM_Task, 'duration_secs': 0.374972} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.473635] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Reconfigured VM instance instance-0000006b to attach disk [datastore1] aea987d3-1daf-45f5-84c3-893eb6bdb57a/aea987d3-1daf-45f5-84c3-893eb6bdb57a.vmdk or device None with type streamOptimized {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1949.474291] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-502a5eed-6a97-4c6c-8b6d-a2603a3bf7fe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.482344] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1949.482344] env[62508]: value = "task-1776913" [ 1949.482344] env[62508]: _type = "Task" [ 1949.482344] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.491841] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776913, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.660760] env[62508]: DEBUG oslo_concurrency.lockutils [None req-fa17e63e-f2f6-49d9-a9f3-e0dcc36aa4d5 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "09918540-a9ce-4c76-84b9-fbe452d5abf3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.696s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.682893] env[62508]: DEBUG nova.compute.manager [req-e403c838-3e74-4918-95f6-ce0226c62259 req-a6dbd8f8-5237-4c6e-821f-4b83d82b5d84 service nova] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Received event network-vif-deleted-0b96af68-0b68-4eac-81c7-2d671bfe54fa {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1949.683487] env[62508]: INFO nova.compute.manager [req-e403c838-3e74-4918-95f6-ce0226c62259 req-a6dbd8f8-5237-4c6e-821f-4b83d82b5d84 service nova] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Neutron deleted interface 0b96af68-0b68-4eac-81c7-2d671bfe54fa; detaching it from the instance and deleting it from the info cache [ 1949.683487] env[62508]: DEBUG nova.network.neutron [req-e403c838-3e74-4918-95f6-ce0226c62259 req-a6dbd8f8-5237-4c6e-821f-4b83d82b5d84 service nova] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1949.855752] env[62508]: DEBUG nova.network.neutron [-] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1949.995231] env[62508]: DEBUG oslo_vmware.api [None 
req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776913, 'name': Rename_Task, 'duration_secs': 0.160125} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.997196] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1949.997473] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8010233-6aac-4c90-8183-863b3887aaee {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.005344] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1950.005344] env[62508]: value = "task-1776914" [ 1950.005344] env[62508]: _type = "Task" [ 1950.005344] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.016124] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776914, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.185783] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-65eb480a-f5db-4564-bff6-10cae41c64c4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.196773] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38467b97-c8e0-4e6b-8c83-8f9208d5ef27 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.236237] env[62508]: DEBUG nova.compute.manager [req-e403c838-3e74-4918-95f6-ce0226c62259 req-a6dbd8f8-5237-4c6e-821f-4b83d82b5d84 service nova] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Detach interface failed, port_id=0b96af68-0b68-4eac-81c7-2d671bfe54fa, reason: Instance 716fc0ee-9aa7-4d2f-a5e0-024484bbe014 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1950.359028] env[62508]: INFO nova.compute.manager [-] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Took 1.45 seconds to deallocate network for instance. [ 1950.519676] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776914, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.867066] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1950.867871] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.867871] env[62508]: DEBUG nova.objects.instance [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lazy-loading 'resources' on Instance uuid 716fc0ee-9aa7-4d2f-a5e0-024484bbe014 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1950.895452] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "bf7e7324-1fb3-4a54-915f-80ae48a36670" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1950.895781] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "bf7e7324-1fb3-4a54-915f-80ae48a36670" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.017469] env[62508]: DEBUG oslo_vmware.api [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776914, 'name': PowerOnVM_Task, 'duration_secs': 0.6805} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.017727] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1951.125816] env[62508]: DEBUG nova.compute.manager [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1951.126852] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb43827b-7876-4ace-aa2c-de81e6f536b8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.399251] env[62508]: DEBUG nova.compute.manager [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1951.444478] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2948bc-0335-4bc4-8488-13d0f95204f6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.452783] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b42bd91-cdd7-4e1c-b108-906f043e6bac {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.484445] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45059c22-cc8e-47d6-bc7a-04d8b4847d44 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.491522] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a31bc67c-409d-4168-847b-93b43d0b8ab1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.504542] env[62508]: DEBUG nova.compute.provider_tree [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1951.644171] env[62508]: DEBUG oslo_concurrency.lockutils [None req-23d619a2-ea98-497c-934f-207d7506df80 tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 29.549s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1951.754030] env[62508]: DEBUG oslo_vmware.rw_handles [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 
tempest-ServerActionsTestOtherB-1159041240-project-member] Completed reading data from the image iterator. {{(pid=62508) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1951.754336] env[62508]: DEBUG oslo_vmware.rw_handles [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5230adac-047e-61ac-10e4-b9f41fac362b/disk-0.vmdk. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1951.755360] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8c8d4d-1493-4e38-8d4e-66abc136ad03 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.763263] env[62508]: DEBUG oslo_vmware.rw_handles [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5230adac-047e-61ac-10e4-b9f41fac362b/disk-0.vmdk is in state: ready. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1951.763449] env[62508]: DEBUG oslo_vmware.rw_handles [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5230adac-047e-61ac-10e4-b9f41fac362b/disk-0.vmdk. {{(pid=62508) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1951.763716] env[62508]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-85ad4781-366d-4968-a529-a9d4236c81e2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.920555] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1952.007616] env[62508]: DEBUG nova.scheduler.client.report [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1952.073542] env[62508]: DEBUG oslo_vmware.rw_handles [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5230adac-047e-61ac-10e4-b9f41fac362b/disk-0.vmdk. 
{{(pid=62508) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1952.073803] env[62508]: INFO nova.virt.vmwareapi.images [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Downloaded image file data 9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5 [ 1952.074767] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9319b29b-5def-4eb1-b5e5-78b0ac961f0c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.092017] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62fa7a9e-705f-4b75-8462-620db396b555 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.118927] env[62508]: INFO nova.virt.vmwareapi.images [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] The imported VM was unregistered [ 1952.121493] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Caching image {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1952.121746] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Creating directory with path [datastore1] devstack-image-cache_base/9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5 {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1952.122081] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-202b228e-bd2e-4b71-b97e-c28a2292d51f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.146886] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Created directory with path [datastore1] devstack-image-cache_base/9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5 {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1952.147104] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_2852f503-a9fc-4ecb-9c28-df953268697f/OSTACK_IMG_2852f503-a9fc-4ecb-9c28-df953268697f.vmdk to [datastore1] devstack-image-cache_base/9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5/9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5.vmdk. 
{{(pid=62508) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1952.147369] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-41defca4-8352-42d2-9c3b-3827c749dda9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.154472] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1952.154472] env[62508]: value = "task-1776916" [ 1952.154472] env[62508]: _type = "Task" [ 1952.154472] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.163825] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776916, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.512864] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.645s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1952.515591] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.595s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1952.517324] env[62508]: INFO nova.compute.claims [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1952.532976] env[62508]: INFO nova.scheduler.client.report [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Deleted allocations for instance 716fc0ee-9aa7-4d2f-a5e0-024484bbe014 [ 1952.665969] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776916, 'name': MoveVirtualDisk_Task} progress is 15%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.040213] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a64dcdb3-3865-4455-8827-68fa2262045b tempest-ServerActionsTestJSON-1490369560 tempest-ServerActionsTestJSON-1490369560-project-member] Lock "716fc0ee-9aa7-4d2f-a5e0-024484bbe014" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.855s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.166863] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776916, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.595170] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037b89c1-8c10-4822-8e69-72670733ceb9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.604451] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c19ef49-ff44-4165-bc4e-35b872236f82 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.636740] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442de458-ac72-42aa-b00a-4f7c25469343 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.646706] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c574b21-eeb8-4fb3-8049-959d188d4648 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.660216] env[62508]: DEBUG nova.compute.provider_tree [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1953.670509] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776916, 'name': MoveVirtualDisk_Task} progress is 57%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.166415] env[62508]: DEBUG nova.scheduler.client.report [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1954.173206] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776916, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.672832] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776916, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.674898] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.159s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1954.675090] env[62508]: DEBUG nova.compute.manager [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1955.172843] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776916, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.667452} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.173165] env[62508]: INFO nova.virt.vmwareapi.ds_util [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_2852f503-a9fc-4ecb-9c28-df953268697f/OSTACK_IMG_2852f503-a9fc-4ecb-9c28-df953268697f.vmdk to [datastore1] devstack-image-cache_base/9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5/9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5.vmdk. 
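The repeated "Task: {...} progress is N%" records in this trace come from oslo_vmware's task poll loop: the API session runs a fixed-interval callback that reads the vCenter task state and stops once the task reaches a terminal state. Below is a minimal sketch of that pattern using oslo_service.loopingcall; it is an illustration only, not the actual oslo_vmware.api.VMwareAPISession.wait_for_task implementation, and fetch_task_info is a hypothetical caller-supplied stand-in for the real PropertyCollector read.

# Sketch of the fixed-interval task polling behind the "progress is N%" records.
# Assumption: fetch_task_info(task_ref) is a caller-supplied callable returning
# an object with .state ('running'/'success'/'error'), .progress and .error;
# the real code obtains these via the vSphere PropertyCollector.
from oslo_service import loopingcall

def wait_for_vcenter_task(task_ref, fetch_task_info, poll_interval=0.5):
    """Block until a vCenter task finishes, reporting progress while polling."""
    def _poll():
        info = fetch_task_info(task_ref)
        if info.state == 'running':
            print("Task %s progress is %s%%" % (task_ref, info.progress))
            return  # keep polling on the next interval
        if info.state == 'success':
            # Terminal state: stop the loop and hand the task info back.
            raise loopingcall.LoopingCallDone(info)
        # Error or cancelled: stop the loop and surface the failure.
        raise loopingcall.LoopingCallDone(RuntimeError(info.error))

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    result = timer.start(interval=poll_interval).wait()
    if isinstance(result, Exception):
        raise result
    return result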
[ 1955.173392] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Cleaning up location [datastore1] OSTACK_IMG_2852f503-a9fc-4ecb-9c28-df953268697f {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1955.173529] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_2852f503-a9fc-4ecb-9c28-df953268697f {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1955.173839] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d2db1167-0dd5-4895-a520-74bce72972ef {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.180184] env[62508]: DEBUG nova.compute.utils [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1955.182714] env[62508]: DEBUG nova.compute.manager [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1955.182900] env[62508]: DEBUG nova.network.neutron [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1955.184798] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1955.184798] env[62508]: value = "task-1776917" [ 1955.184798] env[62508]: _type = "Task" [ 1955.184798] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.194485] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776917, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.227039] env[62508]: DEBUG nova.policy [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81273f5ad53746e2bc89a7f2f7b7a727', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86e6f83751b0446fb8f00684082f018a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1955.487275] env[62508]: DEBUG nova.network.neutron [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Successfully created port: f7fe1000-8664-4e03-aefb-7b1fab478c58 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1955.689236] env[62508]: DEBUG nova.compute.manager [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1955.708058] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776917, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.208839} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.710220] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1955.710831] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5/9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1955.712405] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5/9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5.vmdk to [datastore1] d58f5593-aafc-43e0-a040-96af10659b70/d58f5593-aafc-43e0-a040-96af10659b70.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1955.714182] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c98a92ed-6d9d-48db-808d-3b3c3e3fecd6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.722519] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1955.722519] env[62508]: value = "task-1776918" [ 1955.722519] env[62508]: _type = "Task" [ 1955.722519] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.733268] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776918, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.233568] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776918, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.706184] env[62508]: DEBUG nova.compute.manager [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1956.735028] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776918, 'name': CopyVirtualDisk_Task} progress is 12%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.739644] env[62508]: DEBUG nova.virt.hardware [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1956.739884] env[62508]: DEBUG nova.virt.hardware [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1956.740058] env[62508]: DEBUG nova.virt.hardware [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1956.740253] env[62508]: DEBUG nova.virt.hardware [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1956.740411] env[62508]: DEBUG nova.virt.hardware [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1956.740559] env[62508]: DEBUG nova.virt.hardware [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1956.740763] env[62508]: DEBUG nova.virt.hardware [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1956.740926] env[62508]: DEBUG nova.virt.hardware [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1956.741707] env[62508]: DEBUG nova.virt.hardware [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 
tempest-ServersTestJSON-1349594885-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1956.741936] env[62508]: DEBUG nova.virt.hardware [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1956.742137] env[62508]: DEBUG nova.virt.hardware [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1956.743023] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ebf1417-5954-48e0-a8e8-94116992934a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.752695] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-961764c5-4840-4099-9bd5-aa6b4a281f02 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.002779] env[62508]: DEBUG nova.compute.manager [req-ee0518fb-03a3-4b60-8ee4-dfdcd4bd0231 req-640cce40-938d-40d8-a5b1-5d19e00b4f50 service nova] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Received event network-vif-plugged-f7fe1000-8664-4e03-aefb-7b1fab478c58 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1957.002779] env[62508]: DEBUG oslo_concurrency.lockutils [req-ee0518fb-03a3-4b60-8ee4-dfdcd4bd0231 req-640cce40-938d-40d8-a5b1-5d19e00b4f50 service nova] Acquiring lock "bf7e7324-1fb3-4a54-915f-80ae48a36670-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1957.002779] env[62508]: DEBUG oslo_concurrency.lockutils [req-ee0518fb-03a3-4b60-8ee4-dfdcd4bd0231 req-640cce40-938d-40d8-a5b1-5d19e00b4f50 service nova] Lock "bf7e7324-1fb3-4a54-915f-80ae48a36670-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.002779] env[62508]: DEBUG oslo_concurrency.lockutils [req-ee0518fb-03a3-4b60-8ee4-dfdcd4bd0231 req-640cce40-938d-40d8-a5b1-5d19e00b4f50 service nova] Lock "bf7e7324-1fb3-4a54-915f-80ae48a36670-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1957.002779] env[62508]: DEBUG nova.compute.manager [req-ee0518fb-03a3-4b60-8ee4-dfdcd4bd0231 req-640cce40-938d-40d8-a5b1-5d19e00b4f50 service nova] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] No waiting events found dispatching network-vif-plugged-f7fe1000-8664-4e03-aefb-7b1fab478c58 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1957.003020] env[62508]: WARNING nova.compute.manager [req-ee0518fb-03a3-4b60-8ee4-dfdcd4bd0231 req-640cce40-938d-40d8-a5b1-5d19e00b4f50 service nova] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] 
Received unexpected event network-vif-plugged-f7fe1000-8664-4e03-aefb-7b1fab478c58 for instance with vm_state building and task_state spawning. [ 1957.098323] env[62508]: DEBUG nova.network.neutron [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Successfully updated port: f7fe1000-8664-4e03-aefb-7b1fab478c58 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1957.235617] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776918, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.601418] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "refresh_cache-bf7e7324-1fb3-4a54-915f-80ae48a36670" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1957.601616] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "refresh_cache-bf7e7324-1fb3-4a54-915f-80ae48a36670" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1957.601785] env[62508]: DEBUG nova.network.neutron [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1957.736401] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776918, 'name': CopyVirtualDisk_Task} progress is 52%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.243400] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776918, 'name': CopyVirtualDisk_Task} progress is 71%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.367298] env[62508]: DEBUG nova.network.neutron [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1958.603419] env[62508]: DEBUG nova.network.neutron [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Updating instance_info_cache with network_info: [{"id": "f7fe1000-8664-4e03-aefb-7b1fab478c58", "address": "fa:16:3e:09:57:9a", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7fe1000-86", "ovs_interfaceid": "f7fe1000-8664-4e03-aefb-7b1fab478c58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1958.742114] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776918, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.030052] env[62508]: DEBUG nova.compute.manager [req-43518480-ba70-41d8-be78-ddad37f6e43b req-d6f90277-5284-4c15-9cb7-ced8b3e5cf00 service nova] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Received event network-changed-f7fe1000-8664-4e03-aefb-7b1fab478c58 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1959.030052] env[62508]: DEBUG nova.compute.manager [req-43518480-ba70-41d8-be78-ddad37f6e43b req-d6f90277-5284-4c15-9cb7-ced8b3e5cf00 service nova] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Refreshing instance network info cache due to event network-changed-f7fe1000-8664-4e03-aefb-7b1fab478c58. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1959.030052] env[62508]: DEBUG oslo_concurrency.lockutils [req-43518480-ba70-41d8-be78-ddad37f6e43b req-d6f90277-5284-4c15-9cb7-ced8b3e5cf00 service nova] Acquiring lock "refresh_cache-bf7e7324-1fb3-4a54-915f-80ae48a36670" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1959.106298] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "refresh_cache-bf7e7324-1fb3-4a54-915f-80ae48a36670" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1959.106648] env[62508]: DEBUG nova.compute.manager [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Instance network_info: |[{"id": "f7fe1000-8664-4e03-aefb-7b1fab478c58", "address": "fa:16:3e:09:57:9a", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7fe1000-86", "ovs_interfaceid": "f7fe1000-8664-4e03-aefb-7b1fab478c58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1959.106954] env[62508]: DEBUG oslo_concurrency.lockutils [req-43518480-ba70-41d8-be78-ddad37f6e43b req-d6f90277-5284-4c15-9cb7-ced8b3e5cf00 service nova] Acquired lock "refresh_cache-bf7e7324-1fb3-4a54-915f-80ae48a36670" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1959.107160] env[62508]: DEBUG nova.network.neutron [req-43518480-ba70-41d8-be78-ddad37f6e43b req-d6f90277-5284-4c15-9cb7-ced8b3e5cf00 service nova] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Refreshing network info cache for port f7fe1000-8664-4e03-aefb-7b1fab478c58 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1959.108454] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:57:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec46b14d-3310-4f2b-96c1-f53ee47d3759', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7fe1000-8664-4e03-aefb-7b1fab478c58', 'vif_model': 'vmxnet3'}] 
{{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1959.117977] env[62508]: DEBUG oslo.service.loopingcall [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1959.118972] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1959.119259] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7a4ad31-ab9a-4b8e-a85f-ed1465f478ae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.144925] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1959.144925] env[62508]: value = "task-1776919" [ 1959.144925] env[62508]: _type = "Task" [ 1959.144925] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.156785] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776919, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.240206] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776918, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.366663} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.240667] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5/9dd06b6d-16a1-4138-bb0b-d5a3d0d715e5.vmdk to [datastore1] d58f5593-aafc-43e0-a040-96af10659b70/d58f5593-aafc-43e0-a040-96af10659b70.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1959.245460] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb954fd-66a9-4ccd-8c30-688205972869 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.267465] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] d58f5593-aafc-43e0-a040-96af10659b70/d58f5593-aafc-43e0-a040-96af10659b70.vmdk or device None with type streamOptimized {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1959.268099] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4135adcf-ff70-499f-b70d-5ee673a44c04 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.292436] env[62508]: DEBUG oslo_vmware.api [None 
req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1959.292436] env[62508]: value = "task-1776920" [ 1959.292436] env[62508]: _type = "Task" [ 1959.292436] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.302339] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776920, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.659243] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776919, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.755450] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "db292f08-6a5d-401e-bdba-a368cde4cd39" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.755745] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "db292f08-6a5d-401e-bdba-a368cde4cd39" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.803817] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776920, 'name': ReconfigVM_Task, 'duration_secs': 0.315363} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.804126] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Reconfigured VM instance instance-0000006c to attach disk [datastore1] d58f5593-aafc-43e0-a040-96af10659b70/d58f5593-aafc-43e0-a040-96af10659b70.vmdk or device None with type streamOptimized {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1959.805043] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dc7f9b55-9016-4008-b5b2-94d9befe1430 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.812378] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1959.812378] env[62508]: value = "task-1776921" [ 1959.812378] env[62508]: _type = "Task" [ 1959.812378] env[62508]: } to complete. 
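Annotation: the entries above show the driver issuing Folder.CreateVM_Task, CopyVirtualDisk_Task and VirtualMachine.ReconfigVM_Task through oslo.vmware and then polling each returned task until it finishes (the repeated "_poll_task ... progress is N%" lines). Below is a minimal sketch of that invoke-and-wait pattern. The vCenter host, credentials and the managed-object id are hypothetical placeholders, and the VMwareAPISession parameter names are taken from the oslo.vmware documentation, so verify them against the installed version.

# Hedged sketch of the invoke-and-wait pattern seen above. Host, credentials
# and the vm_ref value are placeholders, not values from this log.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vcenter.example.org',        # hypothetical vCenter host
    'administrator',              # hypothetical user
    'secret',                     # hypothetical password
    api_retry_count=10,
    task_poll_interval=0.5)

# Look up a VM by a (placeholder) managed-object id.
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

# invoke_api() issues the SOAP call (the "Invoking VirtualMachine.<X>_Task"
# lines); wait_for_task() is what produces the "_poll_task ... progress"
# lines until the task reaches completion or raises on error.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)            # expected to be 'success' on completion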
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.821994] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776921, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.822875] env[62508]: DEBUG nova.network.neutron [req-43518480-ba70-41d8-be78-ddad37f6e43b req-d6f90277-5284-4c15-9cb7-ced8b3e5cf00 service nova] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Updated VIF entry in instance network info cache for port f7fe1000-8664-4e03-aefb-7b1fab478c58. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1959.823227] env[62508]: DEBUG nova.network.neutron [req-43518480-ba70-41d8-be78-ddad37f6e43b req-d6f90277-5284-4c15-9cb7-ced8b3e5cf00 service nova] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Updating instance_info_cache with network_info: [{"id": "f7fe1000-8664-4e03-aefb-7b1fab478c58", "address": "fa:16:3e:09:57:9a", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7fe1000-86", "ovs_interfaceid": "f7fe1000-8664-4e03-aefb-7b1fab478c58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1960.156733] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776919, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.258735] env[62508]: DEBUG nova.compute.manager [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1960.323396] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776921, 'name': Rename_Task, 'duration_secs': 0.352287} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.323635] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1960.323923] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c17db49b-bed8-4eba-99d1-445fbf71b268 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.325672] env[62508]: DEBUG oslo_concurrency.lockutils [req-43518480-ba70-41d8-be78-ddad37f6e43b req-d6f90277-5284-4c15-9cb7-ced8b3e5cf00 service nova] Releasing lock "refresh_cache-bf7e7324-1fb3-4a54-915f-80ae48a36670" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1960.331671] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1960.331671] env[62508]: value = "task-1776922" [ 1960.331671] env[62508]: _type = "Task" [ 1960.331671] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.339266] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776922, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.657605] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776919, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.782211] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1960.782575] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1960.784784] env[62508]: INFO nova.compute.claims [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1960.843052] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776922, 'name': PowerOnVM_Task} progress is 100%. 
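Annotation: the "Acquiring lock ... by ...", "Lock ... acquired ... waited 0.000s" and "released ... held N.NNNs" lines throughout this trace come from oslo.concurrency's lockutils wrappers, here the per-instance "refresh_cache-<uuid>" cache locks and the "compute_resources" lock held while the resource tracker claims resources for db292f08. A minimal sketch of both usages, with placeholder lock names:

# Hedged sketch of the oslo.concurrency locking that produces the
# "Acquiring lock"/"acquired"/"released" lines above. Lock names are
# placeholders, not the exact names nova uses internally.
from oslo_concurrency import lockutils

# Context-manager form, comparable to the per-instance
# "refresh_cache-<instance uuid>" locks.
with lockutils.lock('refresh_cache-00000000-0000-0000-0000-000000000000'):
    pass  # refresh the instance network info cache while holding the lock

# Decorator form, comparable to the "compute_resources" lock held during
# an instance claim by the resource tracker.
@lockutils.synchronized('compute_resources')
def instance_claim():
    pass  # account for the new instance against local resources

instance_claim()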
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.161471] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776919, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.342051] env[62508]: DEBUG oslo_vmware.api [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776922, 'name': PowerOnVM_Task, 'duration_secs': 0.521015} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.342331] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1961.446146] env[62508]: DEBUG nova.compute.manager [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1961.447103] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236bcbc4-5a89-4f37-937d-172fb5b3bc3b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.658491] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776919, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.861228] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b77575fd-39ae-4681-86b5-f92414454b85 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.868800] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d7b0f5-8091-441f-9c4c-76423bae2a5d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.898130] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c26115-10e2-4a18-9ea3-c5050b385879 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.904954] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d209dc1-ccad-4551-8d3c-e41768dce885 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.917446] env[62508]: DEBUG nova.compute.provider_tree [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1961.963547] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8555f534-8e90-45cf-9986-22d4dd9f89a2 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "d58f5593-aafc-43e0-a040-96af10659b70" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 37.693s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.158539] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776919, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.437700] env[62508]: ERROR nova.scheduler.client.report [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [req-6b9efca8-453c-4bf8-bfa3-6f9d557e70ba] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6b9efca8-453c-4bf8-bfa3-6f9d557e70ba"}]} [ 1962.452600] env[62508]: DEBUG nova.scheduler.client.report [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1962.467388] env[62508]: DEBUG nova.scheduler.client.report [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1962.467703] env[62508]: DEBUG nova.compute.provider_tree [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1962.479458] env[62508]: DEBUG nova.scheduler.client.report [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1962.500030] env[62508]: DEBUG nova.scheduler.client.report [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1962.567685] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a0f85c-7b43-45ff-907b-754da334d731 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.575494] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-64b43029-2f89-44dd-9b8f-caf913b33c85 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.605772] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8009486b-b517-4931-b34a-6c03c96c3c77 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.612659] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541dc6ce-979a-44d4-943c-8bd001bda19b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.625424] env[62508]: DEBUG nova.compute.provider_tree [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1962.658155] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776919, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.959716] env[62508]: DEBUG oslo_concurrency.lockutils [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "d58f5593-aafc-43e0-a040-96af10659b70" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1962.960055] env[62508]: DEBUG oslo_concurrency.lockutils [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "d58f5593-aafc-43e0-a040-96af10659b70" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1962.960239] env[62508]: DEBUG oslo_concurrency.lockutils [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "d58f5593-aafc-43e0-a040-96af10659b70-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1962.960420] env[62508]: DEBUG oslo_concurrency.lockutils [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "d58f5593-aafc-43e0-a040-96af10659b70-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1962.960588] env[62508]: 
DEBUG oslo_concurrency.lockutils [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "d58f5593-aafc-43e0-a040-96af10659b70-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.962692] env[62508]: INFO nova.compute.manager [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Terminating instance [ 1962.964306] env[62508]: DEBUG nova.compute.manager [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1962.964502] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1962.965338] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af946dce-182e-4b9a-9e44-a450081d3f1e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.972866] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1962.973100] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-24bb134f-c57b-4d53-94b5-ad05c59b397f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.979102] env[62508]: DEBUG oslo_vmware.api [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1962.979102] env[62508]: value = "task-1776923" [ 1962.979102] env[62508]: _type = "Task" [ 1962.979102] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.986215] env[62508]: DEBUG oslo_vmware.api [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776923, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.160944] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776919, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.161928] env[62508]: DEBUG nova.scheduler.client.report [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 192 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1963.162185] env[62508]: DEBUG nova.compute.provider_tree [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 192 to 193 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1963.162363] env[62508]: DEBUG nova.compute.provider_tree [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1963.488986] env[62508]: DEBUG oslo_vmware.api [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776923, 'name': PowerOffVM_Task, 'duration_secs': 0.197646} completed successfully. 
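Annotation: the ERROR above is Placement rejecting the inventory update with HTTP 409 / "placement.concurrent_update" because the report client sent a stale resource-provider generation; the following DEBUG lines show it refreshing inventories and aggregates, retrying, and the provider generation moving from 192 to 193 once the update lands. Below is a hedged sketch of that optimistic-concurrency loop against the Placement API, using requests; the endpoint, token and microversion header are placeholder values.

# Hedged sketch of the generation-conflict retry visible above: PUT the
# inventory with the provider generation last read; on 409
# "placement.concurrent_update", re-read the provider and try again.
import requests

PLACEMENT = 'http://placement.example.org'            # hypothetical endpoint
HEADERS = {'X-Auth-Token': 'TOKEN',                   # hypothetical token
           'OpenStack-API-Version': 'placement 1.26'}

def set_inventory(rp_uuid, inventories, retries=3):
    url = f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories'
    for _ in range(retries):
        # Read the provider's current generation.
        gen = requests.get(url, headers=HEADERS).json()[
            'resource_provider_generation']
        body = {'resource_provider_generation': gen,
                'inventories': inventories}
        resp = requests.put(url, json=body, headers=HEADERS)
        if resp.status_code != 409:
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the
        # generation first; loop and retry with the fresh value.
    raise RuntimeError('inventory update kept conflicting')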
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.489320] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1963.489489] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1963.489726] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b7164f2-1fd5-491a-adae-776976398a84 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.660097] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776919, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.666986] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.884s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1963.667478] env[62508]: DEBUG nova.compute.manager [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1964.160409] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776919, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.171804] env[62508]: DEBUG nova.compute.utils [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1964.173164] env[62508]: DEBUG nova.compute.manager [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1964.173329] env[62508]: DEBUG nova.network.neutron [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1964.221580] env[62508]: DEBUG nova.policy [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '66400df997044a7ca8b711be48707221', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '22fda0b7fb924f1d97862bf4124f9c20', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1964.527976] env[62508]: DEBUG nova.network.neutron [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Successfully created port: 9136b65f-0214-4b9c-8c13-28ee6289e941 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1964.668024] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776919, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.677415] env[62508]: DEBUG nova.compute.manager [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1965.161745] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776919, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.593991] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1965.594154] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1965.594479] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Deleting the datastore file [datastore1] d58f5593-aafc-43e0-a040-96af10659b70 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1965.594578] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34d70410-5180-4abe-9efa-b79196301afb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.601511] env[62508]: DEBUG oslo_vmware.api [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for the task: (returnval){ [ 1965.601511] env[62508]: value = "task-1776925" [ 1965.601511] env[62508]: _type = "Task" [ 1965.601511] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.609185] env[62508]: DEBUG oslo_vmware.api [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776925, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.661848] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776919, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.691197] env[62508]: DEBUG nova.compute.manager [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1965.717712] env[62508]: DEBUG nova.virt.hardware [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1965.717958] env[62508]: DEBUG nova.virt.hardware [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1965.718134] env[62508]: DEBUG nova.virt.hardware [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1965.718322] env[62508]: DEBUG nova.virt.hardware [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1965.718471] env[62508]: DEBUG nova.virt.hardware [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1965.718618] env[62508]: DEBUG nova.virt.hardware [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1965.718821] env[62508]: DEBUG nova.virt.hardware [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1965.718984] env[62508]: DEBUG nova.virt.hardware [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1965.719166] env[62508]: DEBUG 
nova.virt.hardware [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1965.719333] env[62508]: DEBUG nova.virt.hardware [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1965.719508] env[62508]: DEBUG nova.virt.hardware [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1965.720359] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7e8ce57-b9e1-4149-a11d-d19f37c7e7b7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.728797] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b2d014-ce3c-49b0-924a-dd45392e9658 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.111297] env[62508]: DEBUG oslo_vmware.api [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776925, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.162592] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776919, 'name': CreateVM_Task} progress is 25%. 
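Annotation: the nova.virt.hardware lines above walk through guest CPU topology selection for the m1.nano flavor: with 1 vCPU and no flavor or image limits, each limit defaults to 65536 and the only possible, and therefore chosen, topology is sockets=1, cores=1, threads=1. The snippet below is a small self-contained sketch of that enumeration, deliberately simplified compared with what nova actually does (no NUMA handling, preferences or image properties).

# Simplified, hedged sketch of the topology enumeration shown above: list
# every (sockets, cores, threads) split whose product equals the vCPU count
# and that fits within the limits.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topologies = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    topologies.append((s, c, t))
    return topologies

# For the 1-vCPU m1.nano flavor this yields a single candidate, matching the
# "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" line.
print(possible_topologies(1))   # [(1, 1, 1)]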
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.485748] env[62508]: DEBUG nova.compute.manager [req-6290e845-5152-4ca0-ab85-3db2a29ff811 req-d8d6f924-4f89-475b-8d89-4f4645d07e18 service nova] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Received event network-vif-plugged-9136b65f-0214-4b9c-8c13-28ee6289e941 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1966.485993] env[62508]: DEBUG oslo_concurrency.lockutils [req-6290e845-5152-4ca0-ab85-3db2a29ff811 req-d8d6f924-4f89-475b-8d89-4f4645d07e18 service nova] Acquiring lock "db292f08-6a5d-401e-bdba-a368cde4cd39-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1966.486218] env[62508]: DEBUG oslo_concurrency.lockutils [req-6290e845-5152-4ca0-ab85-3db2a29ff811 req-d8d6f924-4f89-475b-8d89-4f4645d07e18 service nova] Lock "db292f08-6a5d-401e-bdba-a368cde4cd39-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1966.486340] env[62508]: DEBUG oslo_concurrency.lockutils [req-6290e845-5152-4ca0-ab85-3db2a29ff811 req-d8d6f924-4f89-475b-8d89-4f4645d07e18 service nova] Lock "db292f08-6a5d-401e-bdba-a368cde4cd39-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1966.486512] env[62508]: DEBUG nova.compute.manager [req-6290e845-5152-4ca0-ab85-3db2a29ff811 req-d8d6f924-4f89-475b-8d89-4f4645d07e18 service nova] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] No waiting events found dispatching network-vif-plugged-9136b65f-0214-4b9c-8c13-28ee6289e941 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1966.486674] env[62508]: WARNING nova.compute.manager [req-6290e845-5152-4ca0-ab85-3db2a29ff811 req-d8d6f924-4f89-475b-8d89-4f4645d07e18 service nova] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Received unexpected event network-vif-plugged-9136b65f-0214-4b9c-8c13-28ee6289e941 for instance with vm_state building and task_state spawning. [ 1966.567252] env[62508]: DEBUG nova.network.neutron [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Successfully updated port: 9136b65f-0214-4b9c-8c13-28ee6289e941 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1966.611565] env[62508]: DEBUG oslo_vmware.api [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776925, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.662213] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776919, 'name': CreateVM_Task} progress is 25%. 
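Annotation: the "Received event network-vif-plugged-...", "No waiting events found dispatching" and the WARNING about an unexpected event above are Neutron notifying Nova through the external-events API that port 9136b65f came up before the compute manager had registered a waiter for it, which is why the event is flagged as unexpected while the instance is still building. The toy sketch below illustrates the waiter pattern this implements, using plain threading primitives; the class and method names are hypothetical, not nova's actual InstanceEvents implementation.

# Toy illustration of the "wait for network-vif-plugged" handshake hinted at
# above. Names are hypothetical; nova's real implementation lives in
# nova.compute.manager.InstanceEvents.
import threading

class InstanceEventWaiter:
    def __init__(self):
        self._events = {}          # event name -> threading.Event
        self._lock = threading.Lock()

    def prepare(self, name):
        # Register interest and hand back the object to wait on, before
        # triggering the operation that will make Neutron send the event.
        ev = threading.Event()
        with self._lock:
            self._events[name] = ev
        return ev

    def dispatch(self, name):
        # Called when the external event arrives; if nobody prepared a
        # waiter this mirrors the "Received unexpected event" warning above.
        with self._lock:
            ev = self._events.pop(name, None)
        if ev is None:
            print(f'unexpected event: {name}')
        else:
            ev.set()

waiter = InstanceEventWaiter()
ev = waiter.prepare('network-vif-plugged-9136b65f')
waiter.dispatch('network-vif-plugged-9136b65f')   # delivered by the API worker
assert ev.wait(timeout=300)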
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.069782] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "refresh_cache-db292f08-6a5d-401e-bdba-a368cde4cd39" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1967.069890] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquired lock "refresh_cache-db292f08-6a5d-401e-bdba-a368cde4cd39" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1967.070072] env[62508]: DEBUG nova.network.neutron [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1967.113313] env[62508]: DEBUG oslo_vmware.api [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776925, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.162962] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776919, 'name': CreateVM_Task} progress is 25%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.601740] env[62508]: DEBUG nova.network.neutron [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1967.613292] env[62508]: DEBUG oslo_vmware.api [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Task: {'id': task-1776925, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.705234} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1967.613519] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1967.613697] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1967.613869] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1967.614114] env[62508]: INFO nova.compute.manager [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Took 4.65 seconds to destroy the instance on the hypervisor. [ 1967.614372] env[62508]: DEBUG oslo.service.loopingcall [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1967.614559] env[62508]: DEBUG nova.compute.manager [-] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1967.614694] env[62508]: DEBUG nova.network.neutron [-] [instance: d58f5593-aafc-43e0-a040-96af10659b70] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1967.665725] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776919, 'name': CreateVM_Task} progress is 25%. 
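Annotation: once the datastore file is deleted and the instance is destroyed on the hypervisor, the compute manager deallocates the Neutron ports, and it drives that through an oslo.service looping call so transient failures are retried (the "Waiting for function ... _deallocate_network_with_retries to return" line above). Below is a minimal sketch of retrying an operation with FixedIntervalLoopingCall; it is a simplified stand-in for the helper nova actually uses, and deallocate_ports() is a hypothetical placeholder.

# Hedged sketch of retrying a cleanup step with oslo.service's looping call.
from oslo_service import loopingcall

def deallocate_ports():
    # Placeholder for the Neutron deallocation; may raise on transient errors.
    return True

attempts = {'n': 0}

def _try_deallocate():
    attempts['n'] += 1
    try:
        result = deallocate_ports()
    except Exception:
        if attempts['n'] >= 3:
            raise                      # give up after a few tries
        return                         # loop again after the interval
    # Success: stop the loop and hand the result back to wait().
    raise loopingcall.LoopingCallDone(result)

timer = loopingcall.FixedIntervalLoopingCall(_try_deallocate)
print(timer.start(interval=1).wait())  # True once deallocation succeeds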
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.730469] env[62508]: DEBUG nova.network.neutron [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Updating instance_info_cache with network_info: [{"id": "9136b65f-0214-4b9c-8c13-28ee6289e941", "address": "fa:16:3e:8d:d7:cd", "network": {"id": "f51db9ea-4de8-40f0-9ace-aac05e474fd2", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-866700916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fda0b7fb924f1d97862bf4124f9c20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9136b65f-02", "ovs_interfaceid": "9136b65f-0214-4b9c-8c13-28ee6289e941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1967.788021] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1967.788294] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1968.165161] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776919, 'name': CreateVM_Task, 'duration_secs': 8.695653} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.165526] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1968.166096] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1968.166272] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1968.166610] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1968.166966] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96356a98-d979-40e1-9290-4111c1a07222 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.171351] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1968.171351] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52984a72-371b-cf90-e21a-e33479a59962" [ 1968.171351] env[62508]: _type = "Task" [ 1968.171351] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.180669] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52984a72-371b-cf90-e21a-e33479a59962, 'name': SearchDatastore_Task} progress is 0%. 
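Annotation: after CreateVM_Task completes, the driver takes the per-image lock on the datastore image cache path ("[datastore1] devstack-image-cache_base/f81c384b-...") plus the matching external semaphore, then issues a HostDatastoreBrowser.SearchDatastore_Task to check whether the cached VMDK already exists before deciding whether to copy it in. A rough sketch of that check-then-populate pattern under the cache lock follows; the two datastore helpers are hypothetical placeholders for the SearchDatastore_Task and CopyVirtualDisk_Task calls the driver really makes.

# Rough sketch of the image-cache check guarded by the per-image lock seen
# above. cached_disk_exists() and copy_image_to_cache() are hypothetical.
from oslo_concurrency import lockutils

IMAGE_ID = 'f81c384b-39f5-44b6-928f-ab9f4bc0a9f7'
CACHE_PATH = f'[datastore1] devstack-image-cache_base/{IMAGE_ID}'

def cached_disk_exists(path):
    return False   # placeholder for the SearchDatastore_Task lookup

def copy_image_to_cache(image_id, path):
    print(f'fetching {image_id} into {path}')   # placeholder for the copy

with lockutils.lock(CACHE_PATH):
    # Holding the lock prevents two concurrent spawns from both deciding the
    # cache is empty and downloading the same image twice.
    if not cached_disk_exists(CACHE_PATH):
        copy_image_to_cache(IMAGE_ID, CACHE_PATH)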
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.233538] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Releasing lock "refresh_cache-db292f08-6a5d-401e-bdba-a368cde4cd39" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1968.233719] env[62508]: DEBUG nova.compute.manager [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Instance network_info: |[{"id": "9136b65f-0214-4b9c-8c13-28ee6289e941", "address": "fa:16:3e:8d:d7:cd", "network": {"id": "f51db9ea-4de8-40f0-9ace-aac05e474fd2", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-866700916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fda0b7fb924f1d97862bf4124f9c20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9136b65f-02", "ovs_interfaceid": "9136b65f-0214-4b9c-8c13-28ee6289e941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1968.234363] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:d7:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39a4aca0-934b-4a91-8779-6a4360c3f967', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9136b65f-0214-4b9c-8c13-28ee6289e941', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1968.242347] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Creating folder: Project (22fda0b7fb924f1d97862bf4124f9c20). Parent ref: group-v368536. 
{{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1968.242646] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9de3b2c8-680c-4ecd-afad-a67e1560beaa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.258566] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Created folder: Project (22fda0b7fb924f1d97862bf4124f9c20) in parent group-v368536. [ 1968.258743] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Creating folder: Instances. Parent ref: group-v368854. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1968.259012] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-67c12d11-1804-4a44-9678-9bd27ba2188d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.268784] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Created folder: Instances in parent group-v368854. [ 1968.269041] env[62508]: DEBUG oslo.service.loopingcall [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1968.269245] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1968.269457] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-35082a99-3295-4082-a8b8-be385faf0cbd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.297207] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1968.297207] env[62508]: value = "task-1776928" [ 1968.297207] env[62508]: _type = "Task" [ 1968.297207] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.297207] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1968.297207] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1968.306157] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776928, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.326380] env[62508]: DEBUG nova.compute.manager [req-bc097d1e-2860-4851-8133-103b5c67ebb9 req-50ff5f2c-657b-4ca2-bd6b-4bb5eb191cb0 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Received event network-vif-deleted-8fad22e1-6bfd-45d7-89b1-d953be11abbc {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1968.326380] env[62508]: INFO nova.compute.manager [req-bc097d1e-2860-4851-8133-103b5c67ebb9 req-50ff5f2c-657b-4ca2-bd6b-4bb5eb191cb0 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Neutron deleted interface 8fad22e1-6bfd-45d7-89b1-d953be11abbc; detaching it from the instance and deleting it from the info cache [ 1968.326380] env[62508]: DEBUG nova.network.neutron [req-bc097d1e-2860-4851-8133-103b5c67ebb9 req-50ff5f2c-657b-4ca2-bd6b-4bb5eb191cb0 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1968.513079] env[62508]: DEBUG nova.compute.manager [req-08d3e42a-bcaf-42d9-a88b-2f41d38e485d req-f79a92dd-7de7-4171-96c6-46744391782a service nova] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Received event network-changed-9136b65f-0214-4b9c-8c13-28ee6289e941 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1968.513299] env[62508]: DEBUG nova.compute.manager [req-08d3e42a-bcaf-42d9-a88b-2f41d38e485d req-f79a92dd-7de7-4171-96c6-46744391782a service nova] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Refreshing instance network info cache due to event network-changed-9136b65f-0214-4b9c-8c13-28ee6289e941. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1968.513524] env[62508]: DEBUG oslo_concurrency.lockutils [req-08d3e42a-bcaf-42d9-a88b-2f41d38e485d req-f79a92dd-7de7-4171-96c6-46744391782a service nova] Acquiring lock "refresh_cache-db292f08-6a5d-401e-bdba-a368cde4cd39" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1968.513650] env[62508]: DEBUG oslo_concurrency.lockutils [req-08d3e42a-bcaf-42d9-a88b-2f41d38e485d req-f79a92dd-7de7-4171-96c6-46744391782a service nova] Acquired lock "refresh_cache-db292f08-6a5d-401e-bdba-a368cde4cd39" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1968.513811] env[62508]: DEBUG nova.network.neutron [req-08d3e42a-bcaf-42d9-a88b-2f41d38e485d req-f79a92dd-7de7-4171-96c6-46744391782a service nova] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Refreshing network info cache for port 9136b65f-0214-4b9c-8c13-28ee6289e941 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1968.684497] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52984a72-371b-cf90-e21a-e33479a59962, 'name': SearchDatastore_Task, 'duration_secs': 0.01489} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.684693] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1968.684926] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1968.685367] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1968.685367] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1968.685515] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1968.685759] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9cb72c7-39e6-4a4e-94aa-bd5f01ec2fd2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.694158] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1968.694348] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1968.695077] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67dcbcf6-c283-4b46-bb28-9e4d2df314e4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.700012] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1968.700012] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5252680f-3755-7ca8-9708-0b16c1341c7c" [ 1968.700012] env[62508]: _type = "Task" [ 1968.700012] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.707865] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5252680f-3755-7ca8-9708-0b16c1341c7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.800747] env[62508]: DEBUG nova.network.neutron [-] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1968.810699] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776928, 'name': CreateVM_Task, 'duration_secs': 0.312054} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.811311] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1968.811966] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1968.812220] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1968.812534] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1968.813030] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3b6c843-43b2-4a89-ba2c-86ad56dee0b9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1968.818480] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 1968.818480] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5253b5da-79ed-e619-2ae4-203d8026f813" [ 1968.818480] env[62508]: _type = "Task" [ 1968.818480] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.826829] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5253b5da-79ed-e619-2ae4-203d8026f813, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.829446] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c49e8a94-8d46-49bc-a716-dffd1c4c33db {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.831674] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1968.831811] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquired lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1968.831956] env[62508]: DEBUG nova.network.neutron [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Forcefully refreshing network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1968.839392] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef73bfb5-4208-46e6-b355-e61d5cccddd6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.866296] env[62508]: DEBUG nova.compute.manager [req-bc097d1e-2860-4851-8133-103b5c67ebb9 req-50ff5f2c-657b-4ca2-bd6b-4bb5eb191cb0 service nova] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Detach interface failed, port_id=8fad22e1-6bfd-45d7-89b1-d953be11abbc, reason: Instance d58f5593-aafc-43e0-a040-96af10659b70 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1969.207479] env[62508]: DEBUG nova.network.neutron [req-08d3e42a-bcaf-42d9-a88b-2f41d38e485d req-f79a92dd-7de7-4171-96c6-46744391782a service nova] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Updated VIF entry in instance network info cache for port 9136b65f-0214-4b9c-8c13-28ee6289e941. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1969.207819] env[62508]: DEBUG nova.network.neutron [req-08d3e42a-bcaf-42d9-a88b-2f41d38e485d req-f79a92dd-7de7-4171-96c6-46744391782a service nova] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Updating instance_info_cache with network_info: [{"id": "9136b65f-0214-4b9c-8c13-28ee6289e941", "address": "fa:16:3e:8d:d7:cd", "network": {"id": "f51db9ea-4de8-40f0-9ace-aac05e474fd2", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-866700916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fda0b7fb924f1d97862bf4124f9c20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9136b65f-02", "ovs_interfaceid": "9136b65f-0214-4b9c-8c13-28ee6289e941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1969.212219] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5252680f-3755-7ca8-9708-0b16c1341c7c, 'name': SearchDatastore_Task, 'duration_secs': 0.008882} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.213163] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bbbd8bc-e06c-4ed6-8a79-b7947624406e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.219278] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1969.219278] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5227f70a-c52a-8841-7836-4155a2b5bdba" [ 1969.219278] env[62508]: _type = "Task" [ 1969.219278] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1969.227646] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5227f70a-c52a-8841-7836-4155a2b5bdba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.306431] env[62508]: INFO nova.compute.manager [-] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Took 1.69 seconds to deallocate network for instance. 
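Note: the refreshed network_info entry just above carries the full Neutron view of port 9136b65f-0214-4b9c-8c13-28ee6289e941 (OVS, bound by nsxv3, MTU 8950), while the earlier "Instance VIF info" line shows the handful of fields the VMware driver actually keeps: bridge name, MAC address, an OpaqueNetwork reference built from the NSX logical-switch id, the Neutron port id, and the vmxnet3 model. A minimal sketch of that reduction, assuming the input is shaped like the network_info list logged above; the helper name is invented for illustration and this is not the Nova code itself:

    def vif_info_from_network_info(network_info, vif_model='vmxnet3'):
        """Illustrative reduction of Neutron VIF dicts to the fields seen in
        the 'Instance VIF info' log line above (sketch, not Nova's code)."""
        vifs = []
        for vif in network_info:
            details = vif.get('details', {})
            vifs.append({
                'network_name': vif['network']['bridge'],      # e.g. 'br-int'
                'mac_address': vif['address'],                  # e.g. 'fa:16:3e:8d:d7:cd'
                'network_ref': {
                    'type': 'OpaqueNetwork',
                    'network-id': details.get('nsx-logical-switch-id'),
                    'network-type': 'nsx.LogicalSwitch',
                    'use-external-id': True,
                },
                'iface_id': vif['id'],                          # Neutron port UUID
                'vif_model': vif_model,
            })
        return vifs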
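Note: the lock names in this stretch ("[datastore1] devstack-image-cache_base/f81c384b-.../f81c384b-....vmdk") and the CopyVirtualDisk targets that follow ("[datastore1] bf7e7324-.../bf7e7324-....vmdk") are datastore path strings: the datastore name in brackets, then a folder/file path. A small string helper reproducing that format, for illustration only; the driver has its own datastore-path utilities for this:

    def ds_path(datastore, *parts):
        """Builds '[datastore1] folder/.../file.vmdk' style strings as seen in
        the lock names and copy source/target paths in the log above."""
        return f"[{datastore}] " + "/".join(parts)

    image_id = "f81c384b-39f5-44b6-928f-ab9f4bc0a9f7"
    instance_id = "bf7e7324-1fb3-4a54-915f-80ae48a36670"

    # cached image disk in the shared image cache directory
    cached_vmdk = ds_path("datastore1", "devstack-image-cache_base",
                          image_id, f"{image_id}.vmdk")
    # per-instance copy target, as in the CopyVirtualDisk entries below
    instance_vmdk = ds_path("datastore1", instance_id, f"{instance_id}.vmdk")

    assert cached_vmdk == ("[datastore1] devstack-image-cache_base/"
                           f"{image_id}/{image_id}.vmdk")
    assert instance_vmdk == f"[datastore1] {instance_id}/{instance_id}.vmdk"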
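Note: most records in this section are the same wait-and-poll pattern: the driver issues a vCenter *_Task call (SearchDatastore_Task, CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ...), logs "Waiting for the task", repeatedly logs "progress is N%", and finally reports "completed successfully" with a duration_secs. A minimal, generic sketch of that loop, assuming a caller-supplied get_task_info callable and a 0.5 s poll interval; this is only the shape of the pattern, not the oslo_vmware implementation:

    import time

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5, timeout=300):
        """Poll a task until it succeeds, fails, or times out (sketch only).
        `get_task_info(task_ref)` is assumed to return a dict such as
        {'state': 'running', 'progress': 40} or {'state': 'success', 'result': ...}."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_ref)
            state = info.get('state')
            if state == 'success':
                return info.get('result')          # task return value, if any
            if state == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            # mirrors the "Task: {...} progress is N%" debug lines in the log
            print(f"Task {task_ref}: progress is {info.get('progress', 0)}%")
            time.sleep(poll_interval)
        raise TimeoutError(f"Task {task_ref} did not complete in {timeout}s")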
[ 1969.328816] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5253b5da-79ed-e619-2ae4-203d8026f813, 'name': SearchDatastore_Task, 'duration_secs': 0.009367} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.329124] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1969.329348] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1969.329554] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1969.713342] env[62508]: DEBUG oslo_concurrency.lockutils [req-08d3e42a-bcaf-42d9-a88b-2f41d38e485d req-f79a92dd-7de7-4171-96c6-46744391782a service nova] Releasing lock "refresh_cache-db292f08-6a5d-401e-bdba-a368cde4cd39" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1969.729386] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5227f70a-c52a-8841-7836-4155a2b5bdba, 'name': SearchDatastore_Task, 'duration_secs': 0.0102} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.729630] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1969.729880] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] bf7e7324-1fb3-4a54-915f-80ae48a36670/bf7e7324-1fb3-4a54-915f-80ae48a36670.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1969.730165] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1969.730355] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1969.730570] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9a34c2c9-1752-4c7b-985f-040b17b592ce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.732612] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-20b7b9ef-3f88-4088-9f6a-689e3e1ee147 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.738591] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1969.738591] env[62508]: value = "task-1776929" [ 1969.738591] env[62508]: _type = "Task" [ 1969.738591] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1969.742616] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1969.742828] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1969.746119] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71ddbe6a-cd02-46a1-b9c3-e3a2c474a58f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.748164] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776929, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.751129] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 1969.751129] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5264eb46-0c41-3cc0-1a53-a58dbc5beb31" [ 1969.751129] env[62508]: _type = "Task" [ 1969.751129] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1969.759340] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5264eb46-0c41-3cc0-1a53-a58dbc5beb31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.814616] env[62508]: DEBUG oslo_concurrency.lockutils [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1969.814869] env[62508]: DEBUG oslo_concurrency.lockutils [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.815219] env[62508]: DEBUG nova.objects.instance [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lazy-loading 'resources' on Instance uuid d58f5593-aafc-43e0-a040-96af10659b70 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1970.061832] env[62508]: DEBUG nova.network.neutron [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Updating instance_info_cache with network_info: [{"id": "60434e32-b866-43d2-8a33-8925c4459e29", "address": "fa:16:3e:3f:a7:2d", "network": {"id": "7fdcf35b-d562-4926-a8b1-15143df837c1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-791265259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": 
{}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86a46b5a43dd41e48816a8d86e3685b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60434e32-b8", "ovs_interfaceid": "60434e32-b866-43d2-8a33-8925c4459e29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.248285] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776929, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471707} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.248588] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] bf7e7324-1fb3-4a54-915f-80ae48a36670/bf7e7324-1fb3-4a54-915f-80ae48a36670.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1970.248765] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1970.248984] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bd650ef0-84dd-47e7-93c7-4ec99adf2f35 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.256398] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1970.256398] env[62508]: value = "task-1776930" [ 1970.256398] env[62508]: _type = "Task" [ 1970.256398] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.264300] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5264eb46-0c41-3cc0-1a53-a58dbc5beb31, 'name': SearchDatastore_Task, 'duration_secs': 0.008365} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.265478] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44044063-b019-4523-a4ad-330564bc75b7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.273375] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776930, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.274342] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 1970.274342] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52be06b1-5cf7-a56e-0f4d-a16dc65e1285" [ 1970.274342] env[62508]: _type = "Task" [ 1970.274342] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.282475] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52be06b1-5cf7-a56e-0f4d-a16dc65e1285, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.388908] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359024d7-23e5-4d24-95d5-161af543ba6d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.396211] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec831e8-9a7c-41c8-8203-a0afd6c4e87f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.426183] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7727e818-593c-43fd-838b-e17b85246859 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.433036] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3402cbc-c0af-413e-b27a-4c46ad0443f5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.445695] env[62508]: DEBUG nova.compute.provider_tree [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1970.565423] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Releasing lock "refresh_cache-aea987d3-1daf-45f5-84c3-893eb6bdb57a" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1970.565664] env[62508]: DEBUG nova.compute.manager [None 
req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Updated the network info_cache for instance {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1970.565882] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1970.566053] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1970.566208] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1970.566359] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1970.566499] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1970.566643] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1970.566769] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1970.566911] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1970.767336] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776930, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068008} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.767611] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1970.768416] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3019c4ce-9cc7-4bfa-85cb-86bbbc7d1e03 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.791250] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] bf7e7324-1fb3-4a54-915f-80ae48a36670/bf7e7324-1fb3-4a54-915f-80ae48a36670.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1970.796639] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b27a5f9-1db8-44be-bd84-aed8e621bcc4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.817152] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52be06b1-5cf7-a56e-0f4d-a16dc65e1285, 'name': SearchDatastore_Task, 'duration_secs': 0.010527} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.818392] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1970.818655] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] db292f08-6a5d-401e-bdba-a368cde4cd39/db292f08-6a5d-401e-bdba-a368cde4cd39.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1970.818971] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1970.818971] env[62508]: value = "task-1776931" [ 1970.818971] env[62508]: _type = "Task" [ 1970.818971] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.819172] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ee7db4f-3682-4c6f-8f63-08d2266adb6a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.829794] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776931, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.830950] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 1970.830950] env[62508]: value = "task-1776932" [ 1970.830950] env[62508]: _type = "Task" [ 1970.830950] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.839120] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1776932, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.949162] env[62508]: DEBUG nova.scheduler.client.report [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1971.070637] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1971.331583] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776931, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.339734] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1776932, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.455385] env[62508]: DEBUG oslo_concurrency.lockutils [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.640s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1971.457779] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.387s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1971.457977] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1971.458154] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1971.459136] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f140e227-e11e-4a37-9323-b22a0de912ba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.467741] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ed0ac9-74aa-402d-b0a7-8ca4557f50b4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.483080] env[62508]: INFO nova.scheduler.client.report [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Deleted allocations for instance d58f5593-aafc-43e0-a040-96af10659b70 [ 1971.484678] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96242f73-5a35-45ad-a146-b09874596ecb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.494634] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63ff5881-d888-49e2-9a3e-e3de70fe0a6c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.527394] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180424MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1971.527594] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1971.527745] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1971.833795] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776931, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.842374] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1776932, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.994499] env[62508]: DEBUG oslo_concurrency.lockutils [None req-39c1beae-48a7-424d-9fda-346ef54ae7f0 tempest-ServerActionsTestOtherB-1159041240 tempest-ServerActionsTestOtherB-1159041240-project-member] Lock "d58f5593-aafc-43e0-a040-96af10659b70" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.034s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1972.332780] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776931, 'name': ReconfigVM_Task, 'duration_secs': 1.172476} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1972.333121] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Reconfigured VM instance instance-00000075 to attach disk [datastore1] bf7e7324-1fb3-4a54-915f-80ae48a36670/bf7e7324-1fb3-4a54-915f-80ae48a36670.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1972.333737] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-65ef997f-bc76-4d50-aef3-b98959451f18 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.344106] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1776932, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.090858} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1972.345469] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] db292f08-6a5d-401e-bdba-a368cde4cd39/db292f08-6a5d-401e-bdba-a368cde4cd39.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1972.345643] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1972.345962] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1972.345962] env[62508]: value = "task-1776933" [ 1972.345962] env[62508]: _type = "Task" [ 1972.345962] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1972.346150] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf13241a-e8e1-48ed-b57c-db7997185430 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.356200] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776933, 'name': Rename_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.357379] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 1972.357379] env[62508]: value = "task-1776934" [ 1972.357379] env[62508]: _type = "Task" [ 1972.357379] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1972.365241] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1776934, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.550571] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance cd2424b1-3842-4df4-8636-23417833ea49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1972.550730] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance aea987d3-1daf-45f5-84c3-893eb6bdb57a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1972.550856] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance bf7e7324-1fb3-4a54-915f-80ae48a36670 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1972.550975] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance db292f08-6a5d-401e-bdba-a368cde4cd39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1972.551175] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1972.551314] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1972.603595] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34bea302-81ea-48e9-9be7-ad33331e3ced {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.611448] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49394c7c-fcda-43f8-97c2-b5cc8688634b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.641659] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404a42da-dac9-43e1-9fc6-be255d1c31e6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.648792] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f819338-9293-48f6-8200-e337803c095c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.661517] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1972.858707] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776933, 'name': Rename_Task, 'duration_secs': 0.184031} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1972.861952] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1972.862262] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72fa5038-602c-423d-a900-516acb0fcf04 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.868995] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1776934, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106849} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1972.870199] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1972.870564] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1972.870564] env[62508]: value = "task-1776936" [ 1972.870564] env[62508]: _type = "Task" [ 1972.870564] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1972.871287] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecae4008-18df-4bf7-8f00-e8fb51c91c87 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.881479] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776936, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.901244] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] db292f08-6a5d-401e-bdba-a368cde4cd39/db292f08-6a5d-401e-bdba-a368cde4cd39.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1972.901534] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61dbd466-90db-4603-a0da-3320f171271e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.920168] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 1972.920168] env[62508]: value = "task-1776937" [ 1972.920168] env[62508]: _type = "Task" [ 1972.920168] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1972.928141] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1776937, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.194144] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 193 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1973.194433] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 193 to 194 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1973.194602] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1973.382810] env[62508]: DEBUG oslo_vmware.api [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 
tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776936, 'name': PowerOnVM_Task, 'duration_secs': 0.508608} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1973.383102] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1973.383292] env[62508]: INFO nova.compute.manager [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Took 16.68 seconds to spawn the instance on the hypervisor. [ 1973.383460] env[62508]: DEBUG nova.compute.manager [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1973.384274] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb0eec73-b589-491d-82ac-eeb1c57e711b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.429876] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1776937, 'name': ReconfigVM_Task, 'duration_secs': 0.283769} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1973.430171] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Reconfigured VM instance instance-00000076 to attach disk [datastore1] db292f08-6a5d-401e-bdba-a368cde4cd39/db292f08-6a5d-401e-bdba-a368cde4cd39.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1973.430774] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2adc9590-82b7-4cb2-900d-47c6b630e550 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.438131] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 1973.438131] env[62508]: value = "task-1776938" [ 1973.438131] env[62508]: _type = "Task" [ 1973.438131] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1973.445461] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1776938, 'name': Rename_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.699873] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1973.700088] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.172s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.901823] env[62508]: INFO nova.compute.manager [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Took 21.99 seconds to build instance. [ 1973.949028] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1776938, 'name': Rename_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.403108] env[62508]: DEBUG oslo_concurrency.lockutils [None req-8974cdfb-71e2-4196-b7bb-09430c870ff4 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "bf7e7324-1fb3-4a54-915f-80ae48a36670" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.507s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1974.448843] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1776938, 'name': Rename_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.759974] env[62508]: DEBUG oslo_concurrency.lockutils [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "bf7e7324-1fb3-4a54-915f-80ae48a36670" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1974.760234] env[62508]: DEBUG oslo_concurrency.lockutils [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "bf7e7324-1fb3-4a54-915f-80ae48a36670" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1974.760451] env[62508]: DEBUG oslo_concurrency.lockutils [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "bf7e7324-1fb3-4a54-915f-80ae48a36670-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1974.760632] env[62508]: DEBUG oslo_concurrency.lockutils [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "bf7e7324-1fb3-4a54-915f-80ae48a36670-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1974.760801] env[62508]: DEBUG oslo_concurrency.lockutils [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "bf7e7324-1fb3-4a54-915f-80ae48a36670-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1974.762983] env[62508]: INFO nova.compute.manager [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Terminating instance [ 1974.764822] env[62508]: DEBUG nova.compute.manager [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1974.765018] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1974.765917] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e86065b-f331-48a7-b3bf-610a56f3ec35 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.773535] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1974.774059] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-50b1f5e1-5a5d-422d-9a4d-8b3c37456a00 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.780414] env[62508]: DEBUG oslo_vmware.api [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1974.780414] env[62508]: value = "task-1776939" [ 1974.780414] env[62508]: _type = "Task" [ 1974.780414] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.788273] env[62508]: DEBUG oslo_vmware.api [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776939, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.948453] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1776938, 'name': Rename_Task, 'duration_secs': 1.205732} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1974.948730] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1974.948993] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-650b7ebe-d390-44d5-b00a-2521dc892e1d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.955217] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 1974.955217] env[62508]: value = "task-1776940" [ 1974.955217] env[62508]: _type = "Task" [ 1974.955217] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.963765] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1776940, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.290694] env[62508]: DEBUG oslo_vmware.api [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776939, 'name': PowerOffVM_Task, 'duration_secs': 0.206516} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.290908] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1975.291127] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1975.291456] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b095cdc-5d2a-40b9-9c98-43e6635af9f8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.465149] env[62508]: DEBUG oslo_vmware.api [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1776940, 'name': PowerOnVM_Task, 'duration_secs': 0.442419} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.465456] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1975.465680] env[62508]: INFO nova.compute.manager [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Took 9.77 seconds to spawn the instance on the hypervisor. 
[ 1975.465861] env[62508]: DEBUG nova.compute.manager [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1975.466651] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba0b179-2927-41c4-9238-c34eaf30de5b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.504936] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1975.505328] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1975.505597] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleting the datastore file [datastore1] bf7e7324-1fb3-4a54-915f-80ae48a36670 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1975.506048] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-84d4b8c7-9cc0-4937-9f14-b7293bf333a9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.513570] env[62508]: DEBUG oslo_vmware.api [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1975.513570] env[62508]: value = "task-1776942" [ 1975.513570] env[62508]: _type = "Task" [ 1975.513570] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.521935] env[62508]: DEBUG oslo_vmware.api [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776942, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.985932] env[62508]: INFO nova.compute.manager [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Took 15.22 seconds to build instance. [ 1976.024245] env[62508]: DEBUG oslo_vmware.api [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776942, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163763} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.024457] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1976.024649] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1976.024830] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1976.025203] env[62508]: INFO nova.compute.manager [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1976.025511] env[62508]: DEBUG oslo.service.loopingcall [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1976.025719] env[62508]: DEBUG nova.compute.manager [-] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1976.025816] env[62508]: DEBUG nova.network.neutron [-] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1976.225227] env[62508]: DEBUG nova.compute.manager [req-a1e9ae93-919b-45d0-8f78-16d650425895 req-8c3dd635-b178-4c04-90d1-f7290cb304db service nova] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Received event network-changed-9136b65f-0214-4b9c-8c13-28ee6289e941 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1976.225461] env[62508]: DEBUG nova.compute.manager [req-a1e9ae93-919b-45d0-8f78-16d650425895 req-8c3dd635-b178-4c04-90d1-f7290cb304db service nova] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Refreshing instance network info cache due to event network-changed-9136b65f-0214-4b9c-8c13-28ee6289e941. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1976.225682] env[62508]: DEBUG oslo_concurrency.lockutils [req-a1e9ae93-919b-45d0-8f78-16d650425895 req-8c3dd635-b178-4c04-90d1-f7290cb304db service nova] Acquiring lock "refresh_cache-db292f08-6a5d-401e-bdba-a368cde4cd39" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1976.225817] env[62508]: DEBUG oslo_concurrency.lockutils [req-a1e9ae93-919b-45d0-8f78-16d650425895 req-8c3dd635-b178-4c04-90d1-f7290cb304db service nova] Acquired lock "refresh_cache-db292f08-6a5d-401e-bdba-a368cde4cd39" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1976.225976] env[62508]: DEBUG nova.network.neutron [req-a1e9ae93-919b-45d0-8f78-16d650425895 req-8c3dd635-b178-4c04-90d1-f7290cb304db service nova] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Refreshing network info cache for port 9136b65f-0214-4b9c-8c13-28ee6289e941 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1976.379778] env[62508]: DEBUG nova.compute.manager [req-f602d016-db99-435e-9ba9-bffb55b13780 req-2fbbf78d-7e41-499c-84d2-7fccebdb9cee service nova] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Received event network-vif-deleted-f7fe1000-8664-4e03-aefb-7b1fab478c58 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1976.380294] env[62508]: INFO nova.compute.manager [req-f602d016-db99-435e-9ba9-bffb55b13780 req-2fbbf78d-7e41-499c-84d2-7fccebdb9cee service nova] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Neutron deleted interface f7fe1000-8664-4e03-aefb-7b1fab478c58; detaching it from the instance and deleting it from the info cache [ 1976.380679] env[62508]: DEBUG nova.network.neutron [req-f602d016-db99-435e-9ba9-bffb55b13780 req-2fbbf78d-7e41-499c-84d2-7fccebdb9cee service nova] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1976.488471] env[62508]: DEBUG oslo_concurrency.lockutils [None req-6b47c6f1-7db3-4818-8f3f-bbfde30958a7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "db292f08-6a5d-401e-bdba-a368cde4cd39" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.733s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1976.853181] env[62508]: DEBUG nova.network.neutron [-] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1976.883928] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6ac70cb7-f454-4215-a2ab-3a30ad1844e3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.894287] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b60c496-0123-4362-98f5-8471f267c03a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.920498] env[62508]: DEBUG nova.compute.manager [req-f602d016-db99-435e-9ba9-bffb55b13780 req-2fbbf78d-7e41-499c-84d2-7fccebdb9cee service 
nova] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Detach interface failed, port_id=f7fe1000-8664-4e03-aefb-7b1fab478c58, reason: Instance bf7e7324-1fb3-4a54-915f-80ae48a36670 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1977.074461] env[62508]: DEBUG nova.network.neutron [req-a1e9ae93-919b-45d0-8f78-16d650425895 req-8c3dd635-b178-4c04-90d1-f7290cb304db service nova] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Updated VIF entry in instance network info cache for port 9136b65f-0214-4b9c-8c13-28ee6289e941. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1977.074822] env[62508]: DEBUG nova.network.neutron [req-a1e9ae93-919b-45d0-8f78-16d650425895 req-8c3dd635-b178-4c04-90d1-f7290cb304db service nova] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Updating instance_info_cache with network_info: [{"id": "9136b65f-0214-4b9c-8c13-28ee6289e941", "address": "fa:16:3e:8d:d7:cd", "network": {"id": "f51db9ea-4de8-40f0-9ace-aac05e474fd2", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-866700916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fda0b7fb924f1d97862bf4124f9c20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9136b65f-02", "ovs_interfaceid": "9136b65f-0214-4b9c-8c13-28ee6289e941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1977.359161] env[62508]: INFO nova.compute.manager [-] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Took 1.33 seconds to deallocate network for instance. 
[ 1977.578244] env[62508]: DEBUG oslo_concurrency.lockutils [req-a1e9ae93-919b-45d0-8f78-16d650425895 req-8c3dd635-b178-4c04-90d1-f7290cb304db service nova] Releasing lock "refresh_cache-db292f08-6a5d-401e-bdba-a368cde4cd39" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1977.866668] env[62508]: DEBUG oslo_concurrency.lockutils [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.866948] env[62508]: DEBUG oslo_concurrency.lockutils [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1977.867186] env[62508]: DEBUG nova.objects.instance [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lazy-loading 'resources' on Instance uuid bf7e7324-1fb3-4a54-915f-80ae48a36670 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1978.443921] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b956e06-fc3f-4b2b-b9bf-eb5bc3813f82 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.452194] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33e7ae6-5ecf-4690-83e8-feab899d4c17 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.487118] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d459c087-83a1-4e0e-81c8-4fb1b8ea78b8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.494903] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb15450-e173-4562-8923-035b0a1a18d2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.508436] env[62508]: DEBUG nova.compute.provider_tree [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1979.014057] env[62508]: DEBUG nova.scheduler.client.report [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1979.521136] env[62508]: DEBUG oslo_concurrency.lockutils [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.651s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1979.542058] env[62508]: INFO nova.scheduler.client.report [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleted allocations for instance bf7e7324-1fb3-4a54-915f-80ae48a36670 [ 1980.052949] env[62508]: DEBUG oslo_concurrency.lockutils [None req-88b2e42f-ee35-4cf8-87b8-1945d7b0c5a2 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "bf7e7324-1fb3-4a54-915f-80ae48a36670" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.292s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1980.784069] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "fa5933ef-8cfb-4815-bb03-b76d89841df6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.784314] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "fa5933ef-8cfb-4815-bb03-b76d89841df6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.287073] env[62508]: DEBUG nova.compute.manager [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1981.813020] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.813020] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.814416] env[62508]: INFO nova.compute.claims [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1983.161857] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b091e505-f87c-44d8-8780-3bae702e3300 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.171362] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf305b2-f1c5-4591-be39-1480a7c3e9e9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.204304] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-459394fb-d9ae-4b32-bc7c-0479d29a4695 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.212485] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df94320a-8280-44ec-aebc-265b5a86e17c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.226264] env[62508]: DEBUG nova.compute.provider_tree [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1983.729621] env[62508]: DEBUG nova.scheduler.client.report [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1984.235430] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 
tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.423s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1984.235983] env[62508]: DEBUG nova.compute.manager [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1984.742164] env[62508]: DEBUG nova.compute.utils [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1984.746339] env[62508]: DEBUG nova.compute.manager [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1984.746590] env[62508]: DEBUG nova.network.neutron [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1984.805606] env[62508]: DEBUG nova.policy [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81273f5ad53746e2bc89a7f2f7b7a727', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86e6f83751b0446fb8f00684082f018a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 1985.052520] env[62508]: DEBUG nova.network.neutron [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Successfully created port: d7e04be0-3c15-4bc2-8c1a-514cb9bdf143 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1985.251079] env[62508]: DEBUG nova.compute.manager [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1986.259927] env[62508]: DEBUG nova.compute.manager [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1986.286081] env[62508]: DEBUG nova.virt.hardware [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1986.286337] env[62508]: DEBUG nova.virt.hardware [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1986.286493] env[62508]: DEBUG nova.virt.hardware [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1986.286676] env[62508]: DEBUG nova.virt.hardware [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1986.286849] env[62508]: DEBUG nova.virt.hardware [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1986.287029] env[62508]: DEBUG nova.virt.hardware [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1986.287239] env[62508]: DEBUG nova.virt.hardware [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1986.287398] env[62508]: DEBUG nova.virt.hardware [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1986.287559] env[62508]: DEBUG nova.virt.hardware [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 
tempest-ServersTestJSON-1349594885-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1986.287723] env[62508]: DEBUG nova.virt.hardware [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1986.287892] env[62508]: DEBUG nova.virt.hardware [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1986.288782] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397164c4-3ba3-43f3-9dd4-cd795cfea7b7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.296935] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25624cae-a3e8-41d5-8949-4fd570df2b7d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.434999] env[62508]: DEBUG nova.compute.manager [req-776e390e-6984-4c69-9538-f065a02af9f6 req-dccac784-df29-429b-a850-306b8013543a service nova] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Received event network-vif-plugged-d7e04be0-3c15-4bc2-8c1a-514cb9bdf143 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1986.435346] env[62508]: DEBUG oslo_concurrency.lockutils [req-776e390e-6984-4c69-9538-f065a02af9f6 req-dccac784-df29-429b-a850-306b8013543a service nova] Acquiring lock "fa5933ef-8cfb-4815-bb03-b76d89841df6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.435483] env[62508]: DEBUG oslo_concurrency.lockutils [req-776e390e-6984-4c69-9538-f065a02af9f6 req-dccac784-df29-429b-a850-306b8013543a service nova] Lock "fa5933ef-8cfb-4815-bb03-b76d89841df6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.435635] env[62508]: DEBUG oslo_concurrency.lockutils [req-776e390e-6984-4c69-9538-f065a02af9f6 req-dccac784-df29-429b-a850-306b8013543a service nova] Lock "fa5933ef-8cfb-4815-bb03-b76d89841df6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.435806] env[62508]: DEBUG nova.compute.manager [req-776e390e-6984-4c69-9538-f065a02af9f6 req-dccac784-df29-429b-a850-306b8013543a service nova] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] No waiting events found dispatching network-vif-plugged-d7e04be0-3c15-4bc2-8c1a-514cb9bdf143 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1986.435970] env[62508]: WARNING nova.compute.manager [req-776e390e-6984-4c69-9538-f065a02af9f6 req-dccac784-df29-429b-a850-306b8013543a service nova] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] 
Received unexpected event network-vif-plugged-d7e04be0-3c15-4bc2-8c1a-514cb9bdf143 for instance with vm_state building and task_state spawning. [ 1986.524830] env[62508]: DEBUG nova.network.neutron [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Successfully updated port: d7e04be0-3c15-4bc2-8c1a-514cb9bdf143 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1987.028426] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "refresh_cache-fa5933ef-8cfb-4815-bb03-b76d89841df6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1987.028426] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "refresh_cache-fa5933ef-8cfb-4815-bb03-b76d89841df6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1987.028426] env[62508]: DEBUG nova.network.neutron [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1987.559254] env[62508]: DEBUG nova.network.neutron [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1987.699581] env[62508]: DEBUG nova.network.neutron [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Updating instance_info_cache with network_info: [{"id": "d7e04be0-3c15-4bc2-8c1a-514cb9bdf143", "address": "fa:16:3e:0b:02:f8", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7e04be0-3c", "ovs_interfaceid": "d7e04be0-3c15-4bc2-8c1a-514cb9bdf143", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1988.202291] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "refresh_cache-fa5933ef-8cfb-4815-bb03-b76d89841df6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1988.202745] env[62508]: DEBUG nova.compute.manager [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Instance network_info: |[{"id": "d7e04be0-3c15-4bc2-8c1a-514cb9bdf143", "address": "fa:16:3e:0b:02:f8", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7e04be0-3c", "ovs_interfaceid": "d7e04be0-3c15-4bc2-8c1a-514cb9bdf143", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1988.203287] env[62508]: 
DEBUG nova.virt.vmwareapi.vmops [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:02:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec46b14d-3310-4f2b-96c1-f53ee47d3759', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd7e04be0-3c15-4bc2-8c1a-514cb9bdf143', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1988.211099] env[62508]: DEBUG oslo.service.loopingcall [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1988.211406] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1988.212060] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-800c3fc1-1ff8-4c76-9eef-f5aa32829be3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.232151] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1988.232151] env[62508]: value = "task-1776948" [ 1988.232151] env[62508]: _type = "Task" [ 1988.232151] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.240105] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776948, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.461302] env[62508]: DEBUG nova.compute.manager [req-29129402-8d80-4ea0-b244-bd17d5ec6fe0 req-074dfb0c-4007-465f-b326-ba9c936b9002 service nova] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Received event network-changed-d7e04be0-3c15-4bc2-8c1a-514cb9bdf143 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1988.461503] env[62508]: DEBUG nova.compute.manager [req-29129402-8d80-4ea0-b244-bd17d5ec6fe0 req-074dfb0c-4007-465f-b326-ba9c936b9002 service nova] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Refreshing instance network info cache due to event network-changed-d7e04be0-3c15-4bc2-8c1a-514cb9bdf143. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1988.461715] env[62508]: DEBUG oslo_concurrency.lockutils [req-29129402-8d80-4ea0-b244-bd17d5ec6fe0 req-074dfb0c-4007-465f-b326-ba9c936b9002 service nova] Acquiring lock "refresh_cache-fa5933ef-8cfb-4815-bb03-b76d89841df6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1988.461857] env[62508]: DEBUG oslo_concurrency.lockutils [req-29129402-8d80-4ea0-b244-bd17d5ec6fe0 req-074dfb0c-4007-465f-b326-ba9c936b9002 service nova] Acquired lock "refresh_cache-fa5933ef-8cfb-4815-bb03-b76d89841df6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1988.462067] env[62508]: DEBUG nova.network.neutron [req-29129402-8d80-4ea0-b244-bd17d5ec6fe0 req-074dfb0c-4007-465f-b326-ba9c936b9002 service nova] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Refreshing network info cache for port d7e04be0-3c15-4bc2-8c1a-514cb9bdf143 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1988.741309] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776948, 'name': CreateVM_Task, 'duration_secs': 0.426171} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.741676] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1988.742134] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1988.742300] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1988.742634] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1988.742914] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-771aa091-6890-4376-97fc-56260be9a5e6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.747313] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1988.747313] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52dec85c-9b2c-7417-708a-644354394562" [ 1988.747313] env[62508]: _type = "Task" [ 1988.747313] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.754404] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52dec85c-9b2c-7417-708a-644354394562, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.167275] env[62508]: DEBUG nova.network.neutron [req-29129402-8d80-4ea0-b244-bd17d5ec6fe0 req-074dfb0c-4007-465f-b326-ba9c936b9002 service nova] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Updated VIF entry in instance network info cache for port d7e04be0-3c15-4bc2-8c1a-514cb9bdf143. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1989.167647] env[62508]: DEBUG nova.network.neutron [req-29129402-8d80-4ea0-b244-bd17d5ec6fe0 req-074dfb0c-4007-465f-b326-ba9c936b9002 service nova] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Updating instance_info_cache with network_info: [{"id": "d7e04be0-3c15-4bc2-8c1a-514cb9bdf143", "address": "fa:16:3e:0b:02:f8", "network": {"id": "1a919f7f-da20-45aa-9c2c-e56ade666fa4", "bridge": "br-int", "label": "tempest-ServersTestJSON-271902367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e6f83751b0446fb8f00684082f018a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7e04be0-3c", "ovs_interfaceid": "d7e04be0-3c15-4bc2-8c1a-514cb9bdf143", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1989.257847] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52dec85c-9b2c-7417-708a-644354394562, 'name': SearchDatastore_Task, 'duration_secs': 0.009375} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.258201] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1989.258419] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1989.258688] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1989.258854] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1989.259055] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1989.259340] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-451e5d9c-239f-4c0b-a3f7-3cc1ab7afb64 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.267846] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1989.268034] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1989.268766] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8092f018-c9c1-4964-a552-80895f034daa {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.273823] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1989.273823] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b6da5c-e5e8-e241-0840-5b8af5a0c753" [ 1989.273823] env[62508]: _type = "Task" [ 1989.273823] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.282347] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b6da5c-e5e8-e241-0840-5b8af5a0c753, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.671069] env[62508]: DEBUG oslo_concurrency.lockutils [req-29129402-8d80-4ea0-b244-bd17d5ec6fe0 req-074dfb0c-4007-465f-b326-ba9c936b9002 service nova] Releasing lock "refresh_cache-fa5933ef-8cfb-4815-bb03-b76d89841df6" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1989.700846] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1989.701107] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1989.701315] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1989.701494] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1989.701666] env[62508]: DEBUG 
oslo_concurrency.lockutils [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1989.703905] env[62508]: INFO nova.compute.manager [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Terminating instance [ 1989.705744] env[62508]: DEBUG nova.compute.manager [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1989.705937] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1989.706854] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e582d9c1-93b0-4b17-8d1a-5f3e76bb9a13 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.714974] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1989.715217] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed0133ec-27d0-4e71-a5a8-cb4ae7d75f65 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.720541] env[62508]: DEBUG oslo_vmware.api [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1989.720541] env[62508]: value = "task-1776950" [ 1989.720541] env[62508]: _type = "Task" [ 1989.720541] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.728049] env[62508]: DEBUG oslo_vmware.api [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776950, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.783904] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52b6da5c-e5e8-e241-0840-5b8af5a0c753, 'name': SearchDatastore_Task, 'duration_secs': 0.009177} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.784703] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-faad16e5-5010-467c-931f-4f390b93817d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.789590] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1989.789590] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c3d18d-6628-32ef-6f25-7856a2d8d1ee" [ 1989.789590] env[62508]: _type = "Task" [ 1989.789590] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.796890] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c3d18d-6628-32ef-6f25-7856a2d8d1ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.230544] env[62508]: DEBUG oslo_vmware.api [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776950, 'name': PowerOffVM_Task, 'duration_secs': 0.213703} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.230813] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1990.230986] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1990.231240] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c2cb29b3-470e-4af7-86a5-4d15e15223ff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.299949] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c3d18d-6628-32ef-6f25-7856a2d8d1ee, 'name': SearchDatastore_Task, 'duration_secs': 0.015208} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.300232] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1990.300492] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] fa5933ef-8cfb-4815-bb03-b76d89841df6/fa5933ef-8cfb-4815-bb03-b76d89841df6.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1990.300752] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9890e7ae-4d0b-4d6e-a932-b770777663c0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.308863] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1990.308863] env[62508]: value = "task-1776952" [ 1990.308863] env[62508]: _type = "Task" [ 1990.308863] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.316559] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776952, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.818456] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776952, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.319678] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776952, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565278} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.320241] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] fa5933ef-8cfb-4815-bb03-b76d89841df6/fa5933ef-8cfb-4815-bb03-b76d89841df6.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1991.320241] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1991.320427] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-80811414-771c-4a11-8abf-18f07982520a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.327703] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1991.327703] env[62508]: value = "task-1776954" [ 1991.327703] env[62508]: _type = "Task" [ 1991.327703] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.336104] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776954, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.838711] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776954, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08601} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.839111] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1991.840024] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8b5dba-0859-4729-899b-065950216ad0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.863498] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] fa5933ef-8cfb-4815-bb03-b76d89841df6/fa5933ef-8cfb-4815-bb03-b76d89841df6.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1991.863811] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-226f86ee-2ff4-4f44-871f-77793e4706f1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.882929] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1991.882929] env[62508]: value = "task-1776955" [ 1991.882929] env[62508]: _type = "Task" [ 1991.882929] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.890586] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776955, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.392777] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776955, 'name': ReconfigVM_Task, 'duration_secs': 0.341674} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1992.393068] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Reconfigured VM instance instance-00000077 to attach disk [datastore1] fa5933ef-8cfb-4815-bb03-b76d89841df6/fa5933ef-8cfb-4815-bb03-b76d89841df6.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1992.393724] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4c519cf1-bf90-4001-855c-ac11bf4d5295 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.399642] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1992.399642] env[62508]: value = "task-1776956" [ 1992.399642] env[62508]: _type = "Task" [ 1992.399642] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1992.407158] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776956, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.564266] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1992.564443] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1992.564672] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Deleting the datastore file [datastore1] aea987d3-1daf-45f5-84c3-893eb6bdb57a {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1992.564951] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8062dacb-9b1f-447c-a09c-7986dac509de {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.571700] env[62508]: DEBUG oslo_vmware.api [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for the task: (returnval){ [ 1992.571700] env[62508]: value = "task-1776957" [ 1992.571700] env[62508]: _type = "Task" [ 1992.571700] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1992.579570] env[62508]: DEBUG oslo_vmware.api [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776957, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.909744] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776956, 'name': Rename_Task, 'duration_secs': 0.184261} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1992.910270] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1992.910444] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea28dac7-9b57-462d-8828-1c5277b533cc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.916019] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1992.916019] env[62508]: value = "task-1776958" [ 1992.916019] env[62508]: _type = "Task" [ 1992.916019] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1992.923608] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776958, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.081195] env[62508]: DEBUG oslo_vmware.api [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Task: {'id': task-1776957, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.191903} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1993.081433] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1993.081616] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1993.081802] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1993.081981] env[62508]: INFO nova.compute.manager [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Took 3.38 seconds to destroy the instance on the hypervisor. [ 1993.082261] env[62508]: DEBUG oslo.service.loopingcall [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1993.082470] env[62508]: DEBUG nova.compute.manager [-] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1993.082654] env[62508]: DEBUG nova.network.neutron [-] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1993.430101] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776958, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.600193] env[62508]: DEBUG nova.compute.manager [req-f8e17ba6-fb7a-4f14-80bf-359b09664da6 req-cfff1564-cbef-4f76-aa20-638105a009f7 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Received event network-vif-deleted-60434e32-b866-43d2-8a33-8925c4459e29 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1993.600193] env[62508]: INFO nova.compute.manager [req-f8e17ba6-fb7a-4f14-80bf-359b09664da6 req-cfff1564-cbef-4f76-aa20-638105a009f7 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Neutron deleted interface 60434e32-b866-43d2-8a33-8925c4459e29; detaching it from the instance and deleting it from the info cache [ 1993.600193] env[62508]: DEBUG nova.network.neutron [req-f8e17ba6-fb7a-4f14-80bf-359b09664da6 req-cfff1564-cbef-4f76-aa20-638105a009f7 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1993.926604] env[62508]: DEBUG oslo_vmware.api [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776958, 'name': PowerOnVM_Task, 'duration_secs': 0.621171} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1993.926938] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1993.927055] env[62508]: INFO nova.compute.manager [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Took 7.67 seconds to spawn the instance on the hypervisor. 
[ 1993.927240] env[62508]: DEBUG nova.compute.manager [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1993.927983] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a3c8028-9fd7-4c84-9e37-7b91ed269208 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.077581] env[62508]: DEBUG nova.network.neutron [-] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1994.103062] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3db80c53-e868-4a4e-aab1-9ff499290197 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.112790] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-470b0468-d95c-4ce5-b99f-4e208c221331 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.139058] env[62508]: DEBUG nova.compute.manager [req-f8e17ba6-fb7a-4f14-80bf-359b09664da6 req-cfff1564-cbef-4f76-aa20-638105a009f7 service nova] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Detach interface failed, port_id=60434e32-b866-43d2-8a33-8925c4459e29, reason: Instance aea987d3-1daf-45f5-84c3-893eb6bdb57a could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1994.443772] env[62508]: INFO nova.compute.manager [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Took 12.65 seconds to build instance. [ 1994.580191] env[62508]: INFO nova.compute.manager [-] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Took 1.50 seconds to deallocate network for instance. 
[ 1994.947013] env[62508]: DEBUG oslo_concurrency.lockutils [None req-e2f51aec-7356-488b-9b88-7ef870960e4e tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "fa5933ef-8cfb-4815-bb03-b76d89841df6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.162s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1995.086945] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1995.087239] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1995.087474] env[62508]: DEBUG nova.objects.instance [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lazy-loading 'resources' on Instance uuid aea987d3-1daf-45f5-84c3-893eb6bdb57a {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1995.534333] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b3934410-5934-4299-a170-96f389772961 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "fa5933ef-8cfb-4815-bb03-b76d89841df6" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1995.534590] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b3934410-5934-4299-a170-96f389772961 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "fa5933ef-8cfb-4815-bb03-b76d89841df6" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1995.534825] env[62508]: DEBUG nova.compute.manager [None req-b3934410-5934-4299-a170-96f389772961 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1995.535796] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8a44d8-3242-4439-bb8e-fe665d550098 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.543130] env[62508]: DEBUG nova.compute.manager [None req-b3934410-5934-4299-a170-96f389772961 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62508) 
do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1995.543681] env[62508]: DEBUG nova.objects.instance [None req-b3934410-5934-4299-a170-96f389772961 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lazy-loading 'flavor' on Instance uuid fa5933ef-8cfb-4815-bb03-b76d89841df6 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1995.654328] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc161b1e-bb74-4aa8-9e63-96f645b5eb96 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.661708] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd3ef22-62c7-485c-8105-f5b4267bc79b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.691352] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97aabe08-93e2-4683-8107-2caa24dc9aaf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.698648] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7eb82f5-e234-42d7-99b6-6bc546db038c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.711448] env[62508]: DEBUG nova.compute.provider_tree [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1996.048921] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3934410-5934-4299-a170-96f389772961 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1996.049336] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-653d378d-c5ca-4ac4-89ee-84dadeeedbd4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.056916] env[62508]: DEBUG oslo_vmware.api [None req-b3934410-5934-4299-a170-96f389772961 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1996.056916] env[62508]: value = "task-1776961" [ 1996.056916] env[62508]: _type = "Task" [ 1996.056916] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.064526] env[62508]: DEBUG oslo_vmware.api [None req-b3934410-5934-4299-a170-96f389772961 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776961, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.214940] env[62508]: DEBUG nova.scheduler.client.report [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1996.566620] env[62508]: DEBUG oslo_vmware.api [None req-b3934410-5934-4299-a170-96f389772961 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776961, 'name': PowerOffVM_Task, 'duration_secs': 0.174948} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1996.566877] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3934410-5934-4299-a170-96f389772961 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1996.567073] env[62508]: DEBUG nova.compute.manager [None req-b3934410-5934-4299-a170-96f389772961 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1996.567800] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3183eb2f-f499-4091-8ffb-9c0d81ca7998 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.720853] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.633s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1996.739194] env[62508]: INFO nova.scheduler.client.report [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Deleted allocations for instance aea987d3-1daf-45f5-84c3-893eb6bdb57a [ 1997.078427] env[62508]: DEBUG oslo_concurrency.lockutils [None req-b3934410-5934-4299-a170-96f389772961 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "fa5933ef-8cfb-4815-bb03-b76d89841df6" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.544s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.246715] env[62508]: DEBUG oslo_concurrency.lockutils [None req-25f647a4-82de-421b-bf47-c8201a6c09ac tempest-AttachVolumeShelveTestJSON-1058986916 
tempest-AttachVolumeShelveTestJSON-1058986916-project-member] Lock "aea987d3-1daf-45f5-84c3-893eb6bdb57a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.545s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1998.155209] env[62508]: DEBUG oslo_concurrency.lockutils [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "fa5933ef-8cfb-4815-bb03-b76d89841df6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1998.155557] env[62508]: DEBUG oslo_concurrency.lockutils [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "fa5933ef-8cfb-4815-bb03-b76d89841df6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1998.155730] env[62508]: DEBUG oslo_concurrency.lockutils [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "fa5933ef-8cfb-4815-bb03-b76d89841df6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1998.155913] env[62508]: DEBUG oslo_concurrency.lockutils [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "fa5933ef-8cfb-4815-bb03-b76d89841df6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1998.156111] env[62508]: DEBUG oslo_concurrency.lockutils [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "fa5933ef-8cfb-4815-bb03-b76d89841df6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1998.158208] env[62508]: INFO nova.compute.manager [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Terminating instance [ 1998.159931] env[62508]: DEBUG nova.compute.manager [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1998.160137] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1998.160967] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a962ad4-6737-4d8a-bd53-79aee496bf77 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.168677] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1998.168895] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4bcf3189-708e-4355-8074-a32c3db06143 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.307813] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1998.308047] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1998.308239] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleting the datastore file [datastore1] fa5933ef-8cfb-4815-bb03-b76d89841df6 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1998.308525] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb7032e6-b24b-415c-a582-dfeaa70cc5fe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.314707] env[62508]: DEBUG oslo_vmware.api [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 1998.314707] env[62508]: value = "task-1776964" [ 1998.314707] env[62508]: _type = "Task" [ 1998.314707] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.322215] env[62508]: DEBUG oslo_vmware.api [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776964, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.824615] env[62508]: DEBUG oslo_vmware.api [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776964, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17872} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.824911] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1998.825132] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1998.825311] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1998.825491] env[62508]: INFO nova.compute.manager [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Took 0.67 seconds to destroy the instance on the hypervisor. [ 1998.825727] env[62508]: DEBUG oslo.service.loopingcall [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1998.825920] env[62508]: DEBUG nova.compute.manager [-] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1998.826021] env[62508]: DEBUG nova.network.neutron [-] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1998.994093] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1999.078629] env[62508]: DEBUG nova.compute.manager [req-1b1e4cb2-2e6d-4bc2-b472-b924323f706b req-5b41ff76-18d5-4493-ac79-eeba9ac015d8 service nova] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Received event network-vif-deleted-d7e04be0-3c15-4bc2-8c1a-514cb9bdf143 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1999.078781] env[62508]: INFO nova.compute.manager [req-1b1e4cb2-2e6d-4bc2-b472-b924323f706b req-5b41ff76-18d5-4493-ac79-eeba9ac015d8 service nova] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Neutron deleted interface d7e04be0-3c15-4bc2-8c1a-514cb9bdf143; detaching it from the instance and deleting it from the info cache [ 1999.078942] env[62508]: DEBUG nova.network.neutron [req-1b1e4cb2-2e6d-4bc2-b472-b924323f706b req-5b41ff76-18d5-4493-ac79-eeba9ac015d8 service nova] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1999.558445] env[62508]: DEBUG nova.network.neutron [-] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1999.581181] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ab528d21-d748-4355-9045-2f98f5d31904 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.591788] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e7fafb-1f13-4169-ba79-5c5185cef924 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.618089] env[62508]: DEBUG nova.compute.manager [req-1b1e4cb2-2e6d-4bc2-b472-b924323f706b req-5b41ff76-18d5-4493-ac79-eeba9ac015d8 service nova] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Detach interface failed, port_id=d7e04be0-3c15-4bc2-8c1a-514cb9bdf143, reason: Instance fa5933ef-8cfb-4815-bb03-b76d89841df6 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2000.061845] env[62508]: INFO nova.compute.manager [-] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Took 1.24 seconds to deallocate network for instance. 
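The "Acquiring lock" / "acquired ... waited" / "released ... held" lines around do_terminate_instance above are emitted by oslo.concurrency whenever a callable runs under a named lock. A minimal sketch of that pattern, using only the public lockutils API; the lock name and the empty function body are illustrative placeholders, not Nova code:

from oslo_concurrency import lockutils

# Serialize work on one instance under an in-process lock named after its UUID.
# Entering and leaving the lock is what produces the "Acquiring lock ...",
# "acquired ... waited Ns" and "released ... held Ns" DEBUG lines in the trace above.
@lockutils.synchronized('fa5933ef-8cfb-4815-bb03-b76d89841df6')
def do_terminate_instance():
    # destroy on the hypervisor, deallocate network, update resource usage ...
    pass

do_terminate_instance()
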
[ 2000.495599] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2000.495874] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2000.568552] env[62508]: DEBUG oslo_concurrency.lockutils [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2000.568819] env[62508]: DEBUG oslo_concurrency.lockutils [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2000.569073] env[62508]: DEBUG nova.objects.instance [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lazy-loading 'resources' on Instance uuid fa5933ef-8cfb-4815-bb03-b76d89841df6 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2001.125890] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32e1c53-3c63-4f12-9278-13de082612db {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.133562] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7257b6f-938d-4e3b-a7ec-b64bb0cf729b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.163202] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97025133-eada-4540-8852-6cc54e430762 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.170792] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c725b1e-7b3f-4468-a8d2-99c95ad98adc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.185340] env[62508]: DEBUG nova.compute.provider_tree [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2001.343156] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Acquiring lock "001e2821-2e78-4b15-8bb4-0dddff544913" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2001.343392] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Lock "001e2821-2e78-4b15-8bb4-0dddff544913" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2001.688380] env[62508]: DEBUG nova.scheduler.client.report [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2001.846857] env[62508]: DEBUG nova.compute.manager [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2002.195419] env[62508]: DEBUG oslo_concurrency.lockutils [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.624s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2002.219661] env[62508]: INFO nova.scheduler.client.report [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleted allocations for instance fa5933ef-8cfb-4815-bb03-b76d89841df6 [ 2002.368024] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.368024] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2002.368024] env[62508]: INFO nova.compute.claims [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2002.509416] 
env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Didn't find any instances for network info cache update. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 2002.509742] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2002.727235] env[62508]: DEBUG oslo_concurrency.lockutils [None req-169fb94e-b701-4cbd-8266-96011ebea042 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "fa5933ef-8cfb-4815-bb03-b76d89841df6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.572s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2002.993190] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2002.993368] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2002.993520] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2002.993650] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Cleaning up deleted instances with incomplete migration {{(pid=62508) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 2003.433975] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-060a185e-eef4-4053-a019-93c1e883ad68 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.442529] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3618f98-5f35-4a8f-bfc8-edcd499c73ec {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.473607] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b3a789-6979-4959-b907-a9f41433da5f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.481502] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc6fd97-0a24-4770-95e3-9381f91b5766 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.494593] env[62508]: DEBUG nova.compute.provider_tree [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2003.877487] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "cd2424b1-3842-4df4-8636-23417833ea49" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2003.877850] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "cd2424b1-3842-4df4-8636-23417833ea49" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2003.877918] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "cd2424b1-3842-4df4-8636-23417833ea49-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2003.878111] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "cd2424b1-3842-4df4-8636-23417833ea49-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2003.878287] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "cd2424b1-3842-4df4-8636-23417833ea49-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2003.880638] env[62508]: INFO nova.compute.manager [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Terminating instance [ 2003.882319] env[62508]: DEBUG nova.compute.manager [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2003.882515] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2003.883463] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b396787f-b934-4bce-b1b4-e0cc1f00d26c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.890987] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2003.891221] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f19a6f94-ace0-448c-996b-ced42e8062cc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.897181] env[62508]: DEBUG oslo_vmware.api [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 2003.897181] env[62508]: value = "task-1776966" [ 2003.897181] env[62508]: _type = "Task" [ 2003.897181] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2003.904982] env[62508]: DEBUG oslo_vmware.api [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776966, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.998261] env[62508]: DEBUG nova.scheduler.client.report [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2004.406819] env[62508]: DEBUG oslo_vmware.api [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776966, 'name': PowerOffVM_Task, 'duration_secs': 0.201688} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2004.407149] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2004.407399] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2004.407661] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0a361e3c-2cca-42dc-b0cd-b7c54ab558d8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.503698] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.137s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2004.504982] env[62508]: DEBUG nova.compute.manager [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Start building networks asynchronously for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2004.579633] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2004.579633] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2004.579943] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleting the datastore file [datastore1] cd2424b1-3842-4df4-8636-23417833ea49 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2004.580107] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f3714268-d79e-423c-a08e-ea7c19f264b4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.586118] env[62508]: DEBUG oslo_vmware.api [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for the task: (returnval){ [ 2004.586118] env[62508]: value = "task-1776968" [ 2004.586118] env[62508]: _type = "Task" [ 2004.586118] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2004.593891] env[62508]: DEBUG oslo_vmware.api [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776968, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.010391] env[62508]: DEBUG nova.compute.utils [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2005.011728] env[62508]: DEBUG nova.compute.manager [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2005.011913] env[62508]: DEBUG nova.network.neutron [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2005.049579] env[62508]: DEBUG nova.policy [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9ded80cb3c7d488ea88b11aac2536992', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e0623a75750841188182d611eed63fe3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 2005.096020] env[62508]: DEBUG oslo_vmware.api [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Task: {'id': task-1776968, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12665} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2005.096360] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2005.096597] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2005.096822] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2005.097011] env[62508]: INFO nova.compute.manager [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Took 1.21 seconds to destroy the instance on the hypervisor. [ 2005.097294] env[62508]: DEBUG oslo.service.loopingcall [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2005.097464] env[62508]: DEBUG nova.compute.manager [-] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2005.097554] env[62508]: DEBUG nova.network.neutron [-] [instance: cd2424b1-3842-4df4-8636-23417833ea49] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2005.303385] env[62508]: DEBUG nova.network.neutron [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Successfully created port: eaf16365-bc40-43a4-9370-a03f8babd813 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2005.354602] env[62508]: DEBUG nova.compute.manager [req-b1826a25-23d7-4fd6-bf8d-afac8e9eca4e req-e76b954c-3a1c-40d9-a788-dada5920e64b service nova] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Received event network-vif-deleted-405eb618-22d6-4623-a68c-d19671b3adf1 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2005.354895] env[62508]: INFO nova.compute.manager [req-b1826a25-23d7-4fd6-bf8d-afac8e9eca4e req-e76b954c-3a1c-40d9-a788-dada5920e64b service nova] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Neutron deleted interface 405eb618-22d6-4623-a68c-d19671b3adf1; detaching it from the instance and deleting it from the info cache [ 2005.355052] env[62508]: DEBUG nova.network.neutron [req-b1826a25-23d7-4fd6-bf8d-afac8e9eca4e req-e76b954c-3a1c-40d9-a788-dada5920e64b service nova] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2005.496272] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2005.515336] env[62508]: DEBUG nova.compute.manager [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2005.832539] env[62508]: DEBUG nova.network.neutron [-] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2005.858900] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5302f58e-6065-42f1-af7b-d90a352975cb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.868466] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c463904f-69b4-48a9-b79e-e189636a19c8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.892657] env[62508]: DEBUG nova.compute.manager [req-b1826a25-23d7-4fd6-bf8d-afac8e9eca4e req-e76b954c-3a1c-40d9-a788-dada5920e64b service nova] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Detach interface failed, port_id=405eb618-22d6-4623-a68c-d19671b3adf1, reason: Instance cd2424b1-3842-4df4-8636-23417833ea49 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2005.999624] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.999887] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2006.000143] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2006.000374] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2006.001648] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8da881-8726-4312-b9ac-5beb18e524af {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.012195] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5fa8322-e61b-46ae-a581-998bce707c38 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.026579] env[62508]: INFO nova.virt.block_device [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Booting with volume 614b8ae6-4410-4156-9cd5-c2eadcc0c105 at /dev/sda [ 2006.028817] 
env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ed3454-5a62-4e0e-90b8-9892ef5e554a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.035706] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ebd779-180a-42d7-92e5-472e49991300 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.065259] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180563MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2006.065418] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2006.065608] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2006.067987] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1aa10e1d-282b-48b2-8496-212db21e29c7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.076272] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed75938-b857-44df-be2d-5c559f056d0c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.100025] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81ed6f81-7126-4823-9839-715f065f00bc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.107877] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff201bb-65d8-44d2-acac-c8678c3e9711 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.133259] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-057dc5a0-45e2-4362-aa81-823214e280a7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.140271] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f9870b-23a1-49d6-a648-420eb2c03ea5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.154473] env[62508]: DEBUG nova.virt.block_device [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Updating existing volume attachment record: 91703b84-4a59-44f6-8b1b-4a06c5315cd9 
{{(pid=62508) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2006.335772] env[62508]: INFO nova.compute.manager [-] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Took 1.24 seconds to deallocate network for instance. [ 2006.702222] env[62508]: DEBUG nova.compute.manager [req-b5732547-e23a-4fb1-af58-b1a4af01267c req-cda1ffbb-a44b-4d5a-97ad-c0cb0e611079 service nova] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Received event network-vif-plugged-eaf16365-bc40-43a4-9370-a03f8babd813 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2006.702222] env[62508]: DEBUG oslo_concurrency.lockutils [req-b5732547-e23a-4fb1-af58-b1a4af01267c req-cda1ffbb-a44b-4d5a-97ad-c0cb0e611079 service nova] Acquiring lock "001e2821-2e78-4b15-8bb4-0dddff544913-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2006.703068] env[62508]: DEBUG oslo_concurrency.lockutils [req-b5732547-e23a-4fb1-af58-b1a4af01267c req-cda1ffbb-a44b-4d5a-97ad-c0cb0e611079 service nova] Lock "001e2821-2e78-4b15-8bb4-0dddff544913-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2006.703531] env[62508]: DEBUG oslo_concurrency.lockutils [req-b5732547-e23a-4fb1-af58-b1a4af01267c req-cda1ffbb-a44b-4d5a-97ad-c0cb0e611079 service nova] Lock "001e2821-2e78-4b15-8bb4-0dddff544913-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2006.703961] env[62508]: DEBUG nova.compute.manager [req-b5732547-e23a-4fb1-af58-b1a4af01267c req-cda1ffbb-a44b-4d5a-97ad-c0cb0e611079 service nova] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] No waiting events found dispatching network-vif-plugged-eaf16365-bc40-43a4-9370-a03f8babd813 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2006.704587] env[62508]: WARNING nova.compute.manager [req-b5732547-e23a-4fb1-af58-b1a4af01267c req-cda1ffbb-a44b-4d5a-97ad-c0cb0e611079 service nova] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Received unexpected event network-vif-plugged-eaf16365-bc40-43a4-9370-a03f8babd813 for instance with vm_state building and task_state block_device_mapping. 
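The PowerOffVM_Task and DeleteDatastoreFile_Task entries above follow the usual oslo.vmware pattern: invoke a vSphere method that returns a task moref, then poll it until completion (the "progress is 0%" and "completed successfully" lines). A rough sketch of that flow, assuming the standard VMwareAPISession constructor arguments (host, username, password, retry count, poll interval); the endpoint, credentials, datastore path and datacenter reference below are placeholders:

from oslo_vmware import api as vmware_api

# Hypothetical connection details; in Nova these come from the [vmware] section of nova.conf.
session = vmware_api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                      api_retry_count=10, task_poll_interval=0.5)

file_manager = session.vim.service_content.fileManager
dc_ref = None  # placeholder: the Datacenter moref that owns datastore1

# Kick off the asynchronous datastore delete; the SOAP call returns a Task moref.
task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task', file_manager,
                          name='[datastore1] fa5933ef-8cfb-4815-bb03-b76d89841df6',
                          datacenter=dc_ref)

# wait_for_task() polls the task state (the _poll_task lines above) and raises
# if vCenter reports the task as failed.
session.wait_for_task(task)
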
[ 2006.783063] env[62508]: DEBUG nova.network.neutron [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Successfully updated port: eaf16365-bc40-43a4-9370-a03f8babd813 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2006.841798] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2007.094883] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance cd2424b1-3842-4df4-8636-23417833ea49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2007.095295] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance db292f08-6a5d-401e-bdba-a368cde4cd39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2007.095494] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 001e2821-2e78-4b15-8bb4-0dddff544913 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2007.095817] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2007.096168] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2007.140997] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e007ced-5fea-40f9-b0b0-520454330937 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.148668] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81dc1725-26ee-43a5-8b9a-cfaff0836b5a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.178261] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c59540f-cf6a-4605-a28e-b5d5675e8fcd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.185401] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c7d7d4-342a-4998-b035-128198d21857 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.200720] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2007.286205] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Acquiring lock "refresh_cache-001e2821-2e78-4b15-8bb4-0dddff544913" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2007.286389] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Acquired lock "refresh_cache-001e2821-2e78-4b15-8bb4-0dddff544913" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2007.286528] env[62508]: DEBUG nova.network.neutron [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Building network 
info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2007.721077] env[62508]: ERROR nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [req-1937c727-eab1-4fe8-b71c-0d4293767247] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1937c727-eab1-4fe8-b71c-0d4293767247"}]} [ 2007.736196] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2007.750683] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2007.750925] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2007.762318] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2007.780487] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2007.825640] env[62508]: DEBUG nova.network.neutron [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 
tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2007.830911] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b716bc-603f-4f1e-a4a6-0de8f58bdefc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.839198] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c763ff11-d54a-431b-9cbd-fae48e205ba4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.873459] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd99ed1-4ee7-4d85-89e3-251f32f775f5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.881536] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc7680c-bd24-480b-b668-744fc9197be6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.895285] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2007.968135] env[62508]: DEBUG nova.network.neutron [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Updating instance_info_cache with network_info: [{"id": "eaf16365-bc40-43a4-9370-a03f8babd813", "address": "fa:16:3e:16:20:c3", "network": {"id": "8a171af9-8de1-4e30-a3d0-38b01ce14744", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1348257429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0623a75750841188182d611eed63fe3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "af454577-0e89-41a3-a9f2-f39716f62fd5", "external-id": "nsx-vlan-transportzone-63", "segmentation_id": 63, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf16365-bc", "ovs_interfaceid": "eaf16365-bc40-43a4-9370-a03f8babd813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 2008.235151] env[62508]: DEBUG nova.compute.manager [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2008.235685] env[62508]: DEBUG nova.virt.hardware [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2008.235897] env[62508]: DEBUG nova.virt.hardware [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2008.236070] env[62508]: DEBUG nova.virt.hardware [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2008.236265] env[62508]: DEBUG nova.virt.hardware [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2008.236410] env[62508]: DEBUG nova.virt.hardware [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2008.236556] env[62508]: DEBUG nova.virt.hardware [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2008.236777] env[62508]: DEBUG nova.virt.hardware [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2008.236933] env[62508]: DEBUG nova.virt.hardware [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 
tempest-ServerActionsV293TestJSON-777634103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2008.237115] env[62508]: DEBUG nova.virt.hardware [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2008.237290] env[62508]: DEBUG nova.virt.hardware [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2008.237464] env[62508]: DEBUG nova.virt.hardware [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2008.238644] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-015b4e84-274e-4c47-823d-d079a5d6a6ce {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.247063] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f0036f-0d15-44e7-8a2f-8a971b19f77e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.426803] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 196 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2008.426803] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 196 to 197 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2008.427026] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2008.470360] env[62508]: DEBUG oslo_concurrency.lockutils [None 
req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Releasing lock "refresh_cache-001e2821-2e78-4b15-8bb4-0dddff544913" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2008.470719] env[62508]: DEBUG nova.compute.manager [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Instance network_info: |[{"id": "eaf16365-bc40-43a4-9370-a03f8babd813", "address": "fa:16:3e:16:20:c3", "network": {"id": "8a171af9-8de1-4e30-a3d0-38b01ce14744", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1348257429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0623a75750841188182d611eed63fe3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "af454577-0e89-41a3-a9f2-f39716f62fd5", "external-id": "nsx-vlan-transportzone-63", "segmentation_id": 63, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf16365-bc", "ovs_interfaceid": "eaf16365-bc40-43a4-9370-a03f8babd813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2008.471139] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:20:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'af454577-0e89-41a3-a9f2-f39716f62fd5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eaf16365-bc40-43a4-9370-a03f8babd813', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2008.478716] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Creating folder: Project (e0623a75750841188182d611eed63fe3). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2008.479260] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-72a37780-95e0-4314-a679-83a62a690873 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.494219] env[62508]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 2008.494323] env[62508]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62508) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 2008.494616] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Folder already exists: Project (e0623a75750841188182d611eed63fe3). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 2008.494805] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Creating folder: Instances. Parent ref: group-v368857. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2008.495079] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-83f65f07-f95c-4c57-a03c-04fd8edd2ab0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.503980] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Created folder: Instances in parent group-v368857. [ 2008.504212] env[62508]: DEBUG oslo.service.loopingcall [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2008.504388] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2008.504576] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5782b118-c936-4530-8825-b12b30597212 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.522342] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2008.522342] env[62508]: value = "task-1776971" [ 2008.522342] env[62508]: _type = "Task" [ 2008.522342] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.529322] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776971, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.728447] env[62508]: DEBUG nova.compute.manager [req-2aa71d13-17e8-4109-b8e6-e0baf09c8004 req-bc18239d-6790-45b1-879b-406d32b75a49 service nova] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Received event network-changed-eaf16365-bc40-43a4-9370-a03f8babd813 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2008.728708] env[62508]: DEBUG nova.compute.manager [req-2aa71d13-17e8-4109-b8e6-e0baf09c8004 req-bc18239d-6790-45b1-879b-406d32b75a49 service nova] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Refreshing instance network info cache due to event network-changed-eaf16365-bc40-43a4-9370-a03f8babd813. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2008.728946] env[62508]: DEBUG oslo_concurrency.lockutils [req-2aa71d13-17e8-4109-b8e6-e0baf09c8004 req-bc18239d-6790-45b1-879b-406d32b75a49 service nova] Acquiring lock "refresh_cache-001e2821-2e78-4b15-8bb4-0dddff544913" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2008.729115] env[62508]: DEBUG oslo_concurrency.lockutils [req-2aa71d13-17e8-4109-b8e6-e0baf09c8004 req-bc18239d-6790-45b1-879b-406d32b75a49 service nova] Acquired lock "refresh_cache-001e2821-2e78-4b15-8bb4-0dddff544913" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2008.729296] env[62508]: DEBUG nova.network.neutron [req-2aa71d13-17e8-4109-b8e6-e0baf09c8004 req-bc18239d-6790-45b1-879b-406d32b75a49 service nova] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Refreshing network info cache for port eaf16365-bc40-43a4-9370-a03f8babd813 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2008.932245] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2008.932554] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.867s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2008.933457] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.091s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2008.933457] env[62508]: DEBUG nova.objects.instance [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lazy-loading 'resources' on Instance uuid cd2424b1-3842-4df4-8636-23417833ea49 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2009.032541] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776971, 'name': CreateVM_Task, 'duration_secs': 0.383018} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.032703] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2009.033417] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'device_type': None, 'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368861', 'volume_id': '614b8ae6-4410-4156-9cd5-c2eadcc0c105', 'name': 'volume-614b8ae6-4410-4156-9cd5-c2eadcc0c105', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '001e2821-2e78-4b15-8bb4-0dddff544913', 'attached_at': '', 'detached_at': '', 'volume_id': '614b8ae6-4410-4156-9cd5-c2eadcc0c105', 'serial': '614b8ae6-4410-4156-9cd5-c2eadcc0c105'}, 'disk_bus': None, 'guest_format': None, 'attachment_id': '91703b84-4a59-44f6-8b1b-4a06c5315cd9', 'mount_device': '/dev/sda', 'volume_type': None}], 'swap': None} {{(pid=62508) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2009.033630] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Root volume attach. Driver type: vmdk {{(pid=62508) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 2009.034411] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9da091-f4c7-47bb-8f36-1a9054948abf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.041840] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3deeca9-2f38-4e1c-bbbb-9ce902988e13 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.047658] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af126e81-89b6-43a9-94cb-c02ab4f51c0e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.053319] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-2aa5a140-07cd-44cd-87a8-deb7568c0bdd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.059491] env[62508]: DEBUG oslo_vmware.api [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Waiting for the task: (returnval){ [ 2009.059491] env[62508]: value = "task-1776972" [ 2009.059491] env[62508]: _type = "Task" [ 2009.059491] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.066607] env[62508]: DEBUG oslo_vmware.api [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Task: {'id': task-1776972, 'name': RelocateVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.416186] env[62508]: DEBUG nova.network.neutron [req-2aa71d13-17e8-4109-b8e6-e0baf09c8004 req-bc18239d-6790-45b1-879b-406d32b75a49 service nova] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Updated VIF entry in instance network info cache for port eaf16365-bc40-43a4-9370-a03f8babd813. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2009.416548] env[62508]: DEBUG nova.network.neutron [req-2aa71d13-17e8-4109-b8e6-e0baf09c8004 req-bc18239d-6790-45b1-879b-406d32b75a49 service nova] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Updating instance_info_cache with network_info: [{"id": "eaf16365-bc40-43a4-9370-a03f8babd813", "address": "fa:16:3e:16:20:c3", "network": {"id": "8a171af9-8de1-4e30-a3d0-38b01ce14744", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1348257429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0623a75750841188182d611eed63fe3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "af454577-0e89-41a3-a9f2-f39716f62fd5", "external-id": "nsx-vlan-transportzone-63", "segmentation_id": 63, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf16365-bc", "ovs_interfaceid": "eaf16365-bc40-43a4-9370-a03f8babd813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2009.487189] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5162372-3c64-469e-a28b-b42ae864357e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.494497] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33cc6aff-c04c-4aab-9d39-813e6c0b6564 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.525621] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67236af1-4999-462d-a9de-96e04ab8ad41 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.533172] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa1cfb2a-a4c7-4c5f-99d9-29bc91a45d3b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.546083] env[62508]: DEBUG nova.compute.provider_tree [None 
req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2009.568007] env[62508]: DEBUG oslo_vmware.api [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Task: {'id': task-1776972, 'name': RelocateVM_Task, 'duration_secs': 0.461659} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.568287] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Volume attach. Driver type: vmdk {{(pid=62508) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2009.568489] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368861', 'volume_id': '614b8ae6-4410-4156-9cd5-c2eadcc0c105', 'name': 'volume-614b8ae6-4410-4156-9cd5-c2eadcc0c105', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '001e2821-2e78-4b15-8bb4-0dddff544913', 'attached_at': '', 'detached_at': '', 'volume_id': '614b8ae6-4410-4156-9cd5-c2eadcc0c105', 'serial': '614b8ae6-4410-4156-9cd5-c2eadcc0c105'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2009.569203] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c18ac55-ce01-4497-ad96-7643fe8bd873 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.583981] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1657ea-4788-4c94-a0f5-c15cb2edb130 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.604909] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] volume-614b8ae6-4410-4156-9cd5-c2eadcc0c105/volume-614b8ae6-4410-4156-9cd5-c2eadcc0c105.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2009.605158] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a91b759-84f6-440f-9700-9271c55fa670 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.623604] env[62508]: DEBUG oslo_vmware.api [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Waiting for the task: (returnval){ [ 2009.623604] 
env[62508]: value = "task-1776973" [ 2009.623604] env[62508]: _type = "Task" [ 2009.623604] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.631155] env[62508]: DEBUG oslo_vmware.api [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Task: {'id': task-1776973, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.919983] env[62508]: DEBUG oslo_concurrency.lockutils [req-2aa71d13-17e8-4109-b8e6-e0baf09c8004 req-bc18239d-6790-45b1-879b-406d32b75a49 service nova] Releasing lock "refresh_cache-001e2821-2e78-4b15-8bb4-0dddff544913" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2010.049456] env[62508]: DEBUG nova.scheduler.client.report [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2010.133328] env[62508]: DEBUG oslo_vmware.api [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Task: {'id': task-1776973, 'name': ReconfigVM_Task, 'duration_secs': 0.298314} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.133602] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Reconfigured VM instance instance-00000078 to attach disk [datastore1] volume-614b8ae6-4410-4156-9cd5-c2eadcc0c105/volume-614b8ae6-4410-4156-9cd5-c2eadcc0c105.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2010.138222] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5c1c03b-0c34-4db1-abd6-7251cef44fa9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.153095] env[62508]: DEBUG oslo_vmware.api [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Waiting for the task: (returnval){ [ 2010.153095] env[62508]: value = "task-1776974" [ 2010.153095] env[62508]: _type = "Task" [ 2010.153095] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.160573] env[62508]: DEBUG oslo_vmware.api [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Task: {'id': task-1776974, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.554060] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.621s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2010.575843] env[62508]: INFO nova.scheduler.client.report [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Deleted allocations for instance cd2424b1-3842-4df4-8636-23417833ea49 [ 2010.664015] env[62508]: DEBUG oslo_vmware.api [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Task: {'id': task-1776974, 'name': ReconfigVM_Task, 'duration_secs': 0.120187} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.664264] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368861', 'volume_id': '614b8ae6-4410-4156-9cd5-c2eadcc0c105', 'name': 'volume-614b8ae6-4410-4156-9cd5-c2eadcc0c105', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '001e2821-2e78-4b15-8bb4-0dddff544913', 'attached_at': '', 'detached_at': '', 'volume_id': '614b8ae6-4410-4156-9cd5-c2eadcc0c105', 'serial': '614b8ae6-4410-4156-9cd5-c2eadcc0c105'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2010.664781] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f9905b52-a9ff-47df-a93e-fb52fa2b254b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.671316] env[62508]: DEBUG oslo_vmware.api [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Waiting for the task: (returnval){ [ 2010.671316] env[62508]: value = "task-1776975" [ 2010.671316] env[62508]: _type = "Task" [ 2010.671316] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.678598] env[62508]: DEBUG oslo_vmware.api [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Task: {'id': task-1776975, 'name': Rename_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.083861] env[62508]: DEBUG oslo_concurrency.lockutils [None req-46ba7a29-1d55-4d0c-94df-583fdf4537f8 tempest-ServersTestJSON-1349594885 tempest-ServersTestJSON-1349594885-project-member] Lock "cd2424b1-3842-4df4-8636-23417833ea49" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.206s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2011.181517] env[62508]: DEBUG oslo_vmware.api [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Task: {'id': task-1776975, 'name': Rename_Task, 'duration_secs': 0.130549} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.181894] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2011.182837] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57efbcea-ccee-4ea3-aa9c-1399ab203180 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.188873] env[62508]: DEBUG oslo_vmware.api [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Waiting for the task: (returnval){ [ 2011.188873] env[62508]: value = "task-1776976" [ 2011.188873] env[62508]: _type = "Task" [ 2011.188873] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.196375] env[62508]: DEBUG oslo_vmware.api [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Task: {'id': task-1776976, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.702918] env[62508]: DEBUG oslo_vmware.api [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Task: {'id': task-1776976, 'name': PowerOnVM_Task, 'duration_secs': 0.452305} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.702918] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2011.702918] env[62508]: INFO nova.compute.manager [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Took 3.46 seconds to spawn the instance on the hypervisor. 
[ 2011.702918] env[62508]: DEBUG nova.compute.manager [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2011.702918] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a36a9d1d-9194-4216-801a-56e4fa515a29 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.218968] env[62508]: INFO nova.compute.manager [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Took 9.87 seconds to build instance. [ 2012.431825] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2012.432084] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2012.432249] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2012.432436] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2012.722337] env[62508]: DEBUG oslo_concurrency.lockutils [None req-a882f346-f0eb-41b1-ac2c-cb7439a81d3c tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Lock "001e2821-2e78-4b15-8bb4-0dddff544913" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.379s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.993614] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2012.993801] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Cleaning up deleted instances {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 2013.509060] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] There are 44 instances to clean {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 2013.509269] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: fa5933ef-8cfb-4815-bb03-b76d89841df6] Instance has had 0 of 5 cleanup attempts {{(pid=62508) 
_run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2013.513376] env[62508]: DEBUG nova.compute.manager [req-8a5cf3c8-e424-4fc5-9920-2da2900f07f4 req-72b1bab9-c46e-48c9-b104-4734c2d70e70 service nova] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Received event network-changed-eaf16365-bc40-43a4-9370-a03f8babd813 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2013.513376] env[62508]: DEBUG nova.compute.manager [req-8a5cf3c8-e424-4fc5-9920-2da2900f07f4 req-72b1bab9-c46e-48c9-b104-4734c2d70e70 service nova] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Refreshing instance network info cache due to event network-changed-eaf16365-bc40-43a4-9370-a03f8babd813. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2013.513589] env[62508]: DEBUG oslo_concurrency.lockutils [req-8a5cf3c8-e424-4fc5-9920-2da2900f07f4 req-72b1bab9-c46e-48c9-b104-4734c2d70e70 service nova] Acquiring lock "refresh_cache-001e2821-2e78-4b15-8bb4-0dddff544913" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2013.513733] env[62508]: DEBUG oslo_concurrency.lockutils [req-8a5cf3c8-e424-4fc5-9920-2da2900f07f4 req-72b1bab9-c46e-48c9-b104-4734c2d70e70 service nova] Acquired lock "refresh_cache-001e2821-2e78-4b15-8bb4-0dddff544913" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2013.513891] env[62508]: DEBUG nova.network.neutron [req-8a5cf3c8-e424-4fc5-9920-2da2900f07f4 req-72b1bab9-c46e-48c9-b104-4734c2d70e70 service nova] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Refreshing network info cache for port eaf16365-bc40-43a4-9370-a03f8babd813 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2013.965689] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Acquiring lock "a7149c0d-b3ff-4119-a71a-29547cdb2251" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2013.965997] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Lock "a7149c0d-b3ff-4119-a71a-29547cdb2251" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.016166] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: bf7e7324-1fb3-4a54-915f-80ae48a36670] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2014.250255] env[62508]: DEBUG nova.network.neutron [req-8a5cf3c8-e424-4fc5-9920-2da2900f07f4 req-72b1bab9-c46e-48c9-b104-4734c2d70e70 service nova] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Updated VIF entry in instance network info cache for port eaf16365-bc40-43a4-9370-a03f8babd813. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2014.250639] env[62508]: DEBUG nova.network.neutron [req-8a5cf3c8-e424-4fc5-9920-2da2900f07f4 req-72b1bab9-c46e-48c9-b104-4734c2d70e70 service nova] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Updating instance_info_cache with network_info: [{"id": "eaf16365-bc40-43a4-9370-a03f8babd813", "address": "fa:16:3e:16:20:c3", "network": {"id": "8a171af9-8de1-4e30-a3d0-38b01ce14744", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1348257429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0623a75750841188182d611eed63fe3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "af454577-0e89-41a3-a9f2-f39716f62fd5", "external-id": "nsx-vlan-transportzone-63", "segmentation_id": 63, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf16365-bc", "ovs_interfaceid": "eaf16365-bc40-43a4-9370-a03f8babd813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2014.468762] env[62508]: DEBUG nova.compute.manager [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2014.522733] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 5029bdd2-1f52-43ec-a978-b788b15a1204] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2014.753824] env[62508]: DEBUG oslo_concurrency.lockutils [req-8a5cf3c8-e424-4fc5-9920-2da2900f07f4 req-72b1bab9-c46e-48c9-b104-4734c2d70e70 service nova] Releasing lock "refresh_cache-001e2821-2e78-4b15-8bb4-0dddff544913" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2014.990459] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.990739] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.992250] env[62508]: INFO nova.compute.claims [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2015.025957] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 1df05ee1-d92d-45be-8337-eba4322bda66] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2015.319860] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "db292f08-6a5d-401e-bdba-a368cde4cd39" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2015.321047] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "db292f08-6a5d-401e-bdba-a368cde4cd39" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2015.529445] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: a63241ff-830a-4724-82ef-ad6c8836d2f5] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2015.824144] env[62508]: DEBUG nova.compute.utils [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Using /dev/sd instead of 
None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2016.032227] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 09918540-a9ce-4c76-84b9-fbe452d5abf3] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2016.047305] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793d7eb9-c683-4762-85c3-73bad8b3bfae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.054785] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ecf51a-fcc5-4d66-b717-e0dc581e1e43 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.091785] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1edb64b-7920-4162-879d-14d1b4f7af98 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.099481] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26bad1c4-f315-466d-94b7-7240bc90cf4f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.113695] env[62508]: DEBUG nova.compute.provider_tree [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2016.327480] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "db292f08-6a5d-401e-bdba-a368cde4cd39" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.535948] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 716fc0ee-9aa7-4d2f-a5e0-024484bbe014] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2016.616354] env[62508]: DEBUG nova.scheduler.client.report [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2017.039659] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 48d8f1ee-4d35-4a64-a72a-e4a505675c8f] Instance has had 0 of 5 cleanup attempts {{(pid=62508) 
_run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2017.121583] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.131s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2017.122159] env[62508]: DEBUG nova.compute.manager [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2017.385267] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "db292f08-6a5d-401e-bdba-a368cde4cd39" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2017.385760] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "db292f08-6a5d-401e-bdba-a368cde4cd39" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2017.386037] env[62508]: INFO nova.compute.manager [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Attaching volume 3a834111-65b6-4937-bbae-d9af1ba7629e to /dev/sdb [ 2017.417213] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40335350-e999-4309-aba1-c42ad6828659 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.424231] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9779f32c-4715-4620-a93d-e3676eb298af {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.437889] env[62508]: DEBUG nova.virt.block_device [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Updating existing volume attachment record: 2b876702-2b0e-42eb-b424-bd2498902431 {{(pid=62508) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2017.543457] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: a89db7c6-b0d9-44c0-8015-8a96f09200f6] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2017.627104] env[62508]: DEBUG nova.compute.utils [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Using /dev/sd instead of None 
{{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2017.628509] env[62508]: DEBUG nova.compute.manager [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Not allocating networking since 'none' was specified. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 2018.047160] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: d9341873-6ce8-4410-ae11-768c05c59f64] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2018.130428] env[62508]: DEBUG nova.compute.manager [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2018.551326] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: d58f5593-aafc-43e0-a040-96af10659b70] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2019.055219] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: aea987d3-1daf-45f5-84c3-893eb6bdb57a] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2019.140204] env[62508]: DEBUG nova.compute.manager [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Start spawning the instance on the hypervisor. 
{{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2019.168841] env[62508]: DEBUG nova.virt.hardware [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2019.169150] env[62508]: DEBUG nova.virt.hardware [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2019.169330] env[62508]: DEBUG nova.virt.hardware [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2019.169569] env[62508]: DEBUG nova.virt.hardware [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2019.169759] env[62508]: DEBUG nova.virt.hardware [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2019.169953] env[62508]: DEBUG nova.virt.hardware [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2019.170215] env[62508]: DEBUG nova.virt.hardware [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2019.170411] env[62508]: DEBUG nova.virt.hardware [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2019.170620] env[62508]: DEBUG nova.virt.hardware [None req-f059fb8e-71ed-462e-a269-295cee13a6eb 
tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2019.170831] env[62508]: DEBUG nova.virt.hardware [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2019.171066] env[62508]: DEBUG nova.virt.hardware [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2019.172020] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b1ffa4-fcb5-4d28-9b46-24c547f94c91 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.180508] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5151828-f80d-46dc-aa32-dce8304fa560 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.193706] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Instance VIF info [] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2019.199265] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Creating folder: Project (5bdff54b38554108b3748203b7f5d3b4). Parent ref: group-v368536. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2019.199537] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3fda9f0-d430-4b21-b305-4731c031ea3a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.209732] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Created folder: Project (5bdff54b38554108b3748203b7f5d3b4) in parent group-v368536. [ 2019.209914] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Creating folder: Instances. Parent ref: group-v368866. {{(pid=62508) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2019.210153] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8f837d0-5cc1-49fd-8b30-f9d6f6e5cca1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.218966] env[62508]: INFO nova.virt.vmwareapi.vm_util [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Created folder: Instances in parent group-v368866. 
[ 2019.219220] env[62508]: DEBUG oslo.service.loopingcall [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2019.219413] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2019.219614] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6942f374-9ed8-42b4-ab07-fbdce359b6da {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.235990] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2019.235990] env[62508]: value = "task-1776982" [ 2019.235990] env[62508]: _type = "Task" [ 2019.235990] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.243507] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776982, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.558960] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 42f06dc8-e5f6-475e-ba42-15b4abc3139a] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2019.746076] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776982, 'name': CreateVM_Task, 'duration_secs': 0.280533} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.746324] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2019.746750] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2019.746950] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2019.747344] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2019.747631] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7171639-f292-44c4-89e2-55ff252b614b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.751881] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for the task: (returnval){ [ 2019.751881] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523d4376-6726-630c-e18c-3fe49ea17621" [ 2019.751881] env[62508]: _type = "Task" [ 2019.751881] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.759425] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523d4376-6726-630c-e18c-3fe49ea17621, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.062160] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: a6002275-d684-4609-9935-95180cff36d8] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2020.263585] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]523d4376-6726-630c-e18c-3fe49ea17621, 'name': SearchDatastore_Task, 'duration_secs': 0.011436} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.263882] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2020.264130] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2020.264374] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2020.264525] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2020.264707] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2020.264962] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-952f6fd2-d6a0-4ded-8ea6-73c2effe34cc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.273169] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2020.273355] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2020.274037] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-545f69f4-c856-4e7d-952b-d95bb90647c0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.278912] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for the task: (returnval){ [ 2020.278912] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52bea304-02d3-5e1b-6670-1825a906ea7a" [ 2020.278912] env[62508]: _type = "Task" [ 2020.278912] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.285914] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52bea304-02d3-5e1b-6670-1825a906ea7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.565401] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: d3829b04-6d1f-44f0-8b94-30b582506ed4] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2020.789875] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52bea304-02d3-5e1b-6670-1825a906ea7a, 'name': SearchDatastore_Task, 'duration_secs': 0.008756} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.790666] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cc9f800-c2ec-4165-a440-6a8d6b7f366f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.795716] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for the task: (returnval){ [ 2020.795716] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f9065b-3c24-aa61-3aa9-2ac82eba9350" [ 2020.795716] env[62508]: _type = "Task" [ 2020.795716] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.803119] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f9065b-3c24-aa61-3aa9-2ac82eba9350, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.069059] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: cd2424b1-3842-4df4-8636-23417833ea49] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2021.305883] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f9065b-3c24-aa61-3aa9-2ac82eba9350, 'name': SearchDatastore_Task, 'duration_secs': 0.009265} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.306193] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2021.306376] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a7149c0d-b3ff-4119-a71a-29547cdb2251/a7149c0d-b3ff-4119-a71a-29547cdb2251.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2021.306637] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac326150-89f0-412a-a282-2546a812e718 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.312720] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for the task: (returnval){ [ 2021.312720] env[62508]: value = "task-1776984" [ 2021.312720] env[62508]: _type = "Task" [ 2021.312720] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.319862] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776984, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.572566] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: aa7c5176-4420-44b1-9fea-6db7561492c7] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2021.822742] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776984, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471576} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.822961] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a7149c0d-b3ff-4119-a71a-29547cdb2251/a7149c0d-b3ff-4119-a71a-29547cdb2251.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2021.823196] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2021.823448] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e8d4ba18-4133-497f-a938-ef2f7b66faa7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.829977] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for the task: (returnval){ [ 2021.829977] env[62508]: value = "task-1776985" [ 2021.829977] env[62508]: _type = "Task" [ 2021.829977] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.837082] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776985, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.981457] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Volume attach. 
Driver type: vmdk {{(pid=62508) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2021.981697] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368865', 'volume_id': '3a834111-65b6-4937-bbae-d9af1ba7629e', 'name': 'volume-3a834111-65b6-4937-bbae-d9af1ba7629e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'db292f08-6a5d-401e-bdba-a368cde4cd39', 'attached_at': '', 'detached_at': '', 'volume_id': '3a834111-65b6-4937-bbae-d9af1ba7629e', 'serial': '3a834111-65b6-4937-bbae-d9af1ba7629e'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2021.982603] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6f9334-9703-4fae-99fa-fd6bee92b90b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.998403] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c7a4df-f993-421d-94c8-5071ee8b8731 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.022817] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] volume-3a834111-65b6-4937-bbae-d9af1ba7629e/volume-3a834111-65b6-4937-bbae-d9af1ba7629e.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2022.023061] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0801bd68-ac03-4d3f-ba8a-f9bc721ee100 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.040368] env[62508]: DEBUG oslo_vmware.api [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2022.040368] env[62508]: value = "task-1776986" [ 2022.040368] env[62508]: _type = "Task" [ 2022.040368] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.047820] env[62508]: DEBUG oslo_vmware.api [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1776986, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.076577] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: d3455694-a157-404f-8153-a9f96bac49a2] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2022.339305] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776985, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068044} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.339572] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2022.340358] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eede545-f637-4fdf-842d-57698673abf2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.359579] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] a7149c0d-b3ff-4119-a71a-29547cdb2251/a7149c0d-b3ff-4119-a71a-29547cdb2251.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2022.360149] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09f48241-fb8a-4198-8123-27e97597c88c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.379229] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for the task: (returnval){ [ 2022.379229] env[62508]: value = "task-1776987" [ 2022.379229] env[62508]: _type = "Task" [ 2022.379229] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.387062] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776987, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.550639] env[62508]: DEBUG oslo_vmware.api [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1776986, 'name': ReconfigVM_Task, 'duration_secs': 0.348191} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.550932] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Reconfigured VM instance instance-00000076 to attach disk [datastore1] volume-3a834111-65b6-4937-bbae-d9af1ba7629e/volume-3a834111-65b6-4937-bbae-d9af1ba7629e.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2022.555702] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ef4a53d-4029-404a-96c0-3448a557b320 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.570561] env[62508]: DEBUG oslo_vmware.api [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2022.570561] env[62508]: value = "task-1776988" [ 2022.570561] env[62508]: _type = "Task" [ 2022.570561] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.578223] env[62508]: DEBUG oslo_vmware.api [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1776988, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.579729] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 544d165c-5054-4c57-a5d9-ac69046c6fbc] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2022.889566] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776987, 'name': ReconfigVM_Task, 'duration_secs': 0.313892} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.889830] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Reconfigured VM instance instance-00000079 to attach disk [datastore1] a7149c0d-b3ff-4119-a71a-29547cdb2251/a7149c0d-b3ff-4119-a71a-29547cdb2251.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2022.890450] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-02493ef3-8405-4896-9473-b2a5b13ec701 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.897861] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for the task: (returnval){ [ 2022.897861] env[62508]: value = "task-1776989" [ 2022.897861] env[62508]: _type = "Task" [ 2022.897861] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.905265] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776989, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.082362] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 3e79a6d4-8639-478e-8753-71ff0e07496f] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2023.084382] env[62508]: DEBUG oslo_vmware.api [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1776988, 'name': ReconfigVM_Task, 'duration_secs': 0.141892} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.084825] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368865', 'volume_id': '3a834111-65b6-4937-bbae-d9af1ba7629e', 'name': 'volume-3a834111-65b6-4937-bbae-d9af1ba7629e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'db292f08-6a5d-401e-bdba-a368cde4cd39', 'attached_at': '', 'detached_at': '', 'volume_id': '3a834111-65b6-4937-bbae-d9af1ba7629e', 'serial': '3a834111-65b6-4937-bbae-d9af1ba7629e'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2023.409343] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776989, 'name': Rename_Task, 'duration_secs': 0.198817} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.409615] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2023.409864] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f69cc3a-1f18-4db9-9ce1-1ecec8c69e12 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.415916] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for the task: (returnval){ [ 2023.415916] env[62508]: value = "task-1776990" [ 2023.415916] env[62508]: _type = "Task" [ 2023.415916] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.424535] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776990, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.585394] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 0a4958d5-b9a9-4854-90ca-f19eb34cb15b] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2023.925515] env[62508]: DEBUG oslo_vmware.api [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776990, 'name': PowerOnVM_Task, 'duration_secs': 0.487245} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.925764] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2023.925970] env[62508]: INFO nova.compute.manager [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Took 4.79 seconds to spawn the instance on the hypervisor. [ 2023.926161] env[62508]: DEBUG nova.compute.manager [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2023.926896] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d736b66-7dce-4401-80fc-d4310a94f6a4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.090425] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: e7f521db-2dab-4c2c-bf2b-aa6e217f29bd] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2024.123775] env[62508]: DEBUG nova.objects.instance [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lazy-loading 'flavor' on Instance uuid db292f08-6a5d-401e-bdba-a368cde4cd39 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2024.444506] env[62508]: INFO nova.compute.manager [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Took 9.47 seconds to build instance. 
[ 2024.593965] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: b74d8374-d5ae-456b-9e9e-ec09459a737b] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2024.628829] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9a6492f4-7bb1-490d-90d8-69f93b201dda tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "db292f08-6a5d-401e-bdba-a368cde4cd39" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.243s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2024.733033] env[62508]: INFO nova.compute.manager [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Rebuilding instance [ 2024.778788] env[62508]: DEBUG nova.compute.manager [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2024.779671] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7129943f-95b1-4480-99fc-ee9b5dcdbcc2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.949049] env[62508]: DEBUG oslo_concurrency.lockutils [None req-f059fb8e-71ed-462e-a269-295cee13a6eb tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Lock "a7149c0d-b3ff-4119-a71a-29547cdb2251" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 10.983s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2025.097749] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 84e4a14a-f3cb-4a71-8e7b-8a583a20b8c3] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2025.291138] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2025.291446] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4cd98f99-74a8-4f64-b71e-262df722bf48 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.299097] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for the task: (returnval){ [ 2025.299097] env[62508]: value = "task-1776991" [ 2025.299097] env[62508]: _type = "Task" [ 2025.299097] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.307919] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776991, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.601679] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 5da47620-3979-44e8-91c5-154a1fe4ee48] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2025.733955] env[62508]: DEBUG oslo_concurrency.lockutils [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "38cc40f2-e322-414b-a266-f7b5dcbedaf8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2025.734206] env[62508]: DEBUG oslo_concurrency.lockutils [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "38cc40f2-e322-414b-a266-f7b5dcbedaf8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2025.809111] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776991, 'name': PowerOffVM_Task, 'duration_secs': 0.183283} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.809435] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2025.809681] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2025.810461] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50f64495-3247-4546-a3e4-c8c774bbc615 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.816856] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2025.817094] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ce69856-c593-4634-99f2-085db26a070a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.843146] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2025.843368] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2025.843564] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Deleting the datastore file [datastore1] a7149c0d-b3ff-4119-a71a-29547cdb2251 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2025.843821] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c78b00fd-662e-49d7-ab3b-7b405666c7cc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.851192] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for the task: (returnval){ [ 2025.851192] env[62508]: value = "task-1776993" [ 2025.851192] env[62508]: _type = "Task" [ 2025.851192] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.858865] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776993, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.105595] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: b83dd148-8cf6-474b-bb19-e0822732b12a] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2026.236229] env[62508]: DEBUG nova.compute.manager [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Starting instance... {{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2026.361454] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776993, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099561} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.361767] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2026.361998] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2026.362213] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2026.608621] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 2aeb5a4c-785a-4238-8575-ecd1ff84b97c] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2026.761389] env[62508]: DEBUG oslo_concurrency.lockutils [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2026.761653] env[62508]: DEBUG oslo_concurrency.lockutils [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" 
:: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2026.763231] env[62508]: INFO nova.compute.claims [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2027.112266] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 954e23bc-3355-4ab6-ad81-ea7bc55b6ee7] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2027.399300] env[62508]: DEBUG nova.virt.hardware [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2027.399557] env[62508]: DEBUG nova.virt.hardware [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2027.399716] env[62508]: DEBUG nova.virt.hardware [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2027.399898] env[62508]: DEBUG nova.virt.hardware [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2027.400059] env[62508]: DEBUG nova.virt.hardware [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2027.400217] env[62508]: DEBUG nova.virt.hardware [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2027.400424] env[62508]: DEBUG nova.virt.hardware [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2027.400585] env[62508]: DEBUG nova.virt.hardware [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2027.400782] env[62508]: DEBUG nova.virt.hardware [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2027.400925] env[62508]: DEBUG nova.virt.hardware [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2027.401121] env[62508]: DEBUG nova.virt.hardware [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2027.401985] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-285f3e5d-8840-4080-896f-6ea164a83ebb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.411262] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4072337a-300a-416f-9a62-c0eabe94e58f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.425033] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Instance VIF info [] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2027.431078] env[62508]: DEBUG oslo.service.loopingcall [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2027.431078] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2027.431078] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4a466819-b986-473b-b430-ef458670fac9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.447746] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2027.447746] env[62508]: value = "task-1776994" [ 2027.447746] env[62508]: _type = "Task" [ 2027.447746] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2027.454925] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776994, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.615431] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: b4427ba0-4dcf-4b21-a584-a7fee560f135] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2027.827109] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e211ee89-9a44-4890-86e5-e97c8d673b23 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.834153] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e85884f-b6ab-4b65-9db5-7e3f98a482dd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.862985] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0bc68e6-7bc6-4599-9772-b94cb67a23ba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.869620] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea3fef08-3a3c-485a-9aa8-c1f6a892f950 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.881921] env[62508]: DEBUG nova.compute.provider_tree [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2027.958426] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1776994, 'name': CreateVM_Task, 'duration_secs': 0.272895} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2027.958651] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2027.958991] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2027.959170] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2027.959492] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2027.959733] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44fabc88-4556-4fc2-a57c-7eab34d5d4cf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.963877] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for the task: (returnval){ [ 2027.963877] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52348e14-c77e-b9cd-a1e4-d4cceedf15b7" [ 2027.963877] env[62508]: _type = "Task" [ 2027.963877] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2027.971112] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52348e14-c77e-b9cd-a1e4-d4cceedf15b7, 'name': SearchDatastore_Task} progress is 0%. 
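The lock and semaphore lines around devstack-image-cache_base/f81c384b-… show the image cache being probed under a per-image lock, so concurrent spawns of the same image do not race when checking or populating the cache. A minimal sketch of that pattern using the oslo.concurrency lock context manager; the probe/populate helpers are illustrative placeholders, not Nova functions:

```python
# Lock-then-check pattern: guard the per-image cache directory so only one
# request at a time probes (SearchDatastore_Task in the log) or populates it.
from oslo_concurrency import lockutils

def ensure_cached_image(datastore, image_id, probe_cache, populate_cache):
    lock_name = f"[{datastore}] devstack-image-cache_base/{image_id}"
    with lockutils.lock(lock_name):
        if not probe_cache(datastore, image_id):   # is the vmdk already cached?
            populate_cache(datastore, image_id)    # download/convert only once
```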
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.119296] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: f3e1c48d-9aaf-415f-8234-82a71bb469ee] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2028.385139] env[62508]: DEBUG nova.scheduler.client.report [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2028.474312] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52348e14-c77e-b9cd-a1e4-d4cceedf15b7, 'name': SearchDatastore_Task, 'duration_secs': 0.00996} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2028.474532] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2028.474770] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2028.475635] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2028.475635] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2028.475635] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2028.475635] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7f61a33-0560-48da-8f28-03c2690a7a0d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.483903] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2028.484096] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2028.484787] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-654d6d66-d20e-4d7c-84bc-2facb1c890bc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.489611] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for the task: (returnval){ [ 2028.489611] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ffbc2c-ec83-273f-b642-775e697f522f" [ 2028.489611] env[62508]: _type = "Task" [ 2028.489611] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.496807] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ffbc2c-ec83-273f-b642-775e697f522f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.622285] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 806102ec-7622-4770-91c9-8c5723893dec] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2028.890362] env[62508]: DEBUG oslo_concurrency.lockutils [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.128s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2028.890899] env[62508]: DEBUG nova.compute.manager [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Start building networks asynchronously for instance. 
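The inventory record reported for provider 5d5b4923-… implies the schedulable capacity the resource tracker claims against (the compute_resources lock held for the instance_claim above). A small arithmetic sketch using only the numbers from the log, where usable = (total - reserved) * allocation_ratio; this is plain arithmetic, not a Placement API call:

```python
# Capacity implied by the logged inventory for provider 5d5b4923-....
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    usable = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {usable:g} schedulable units")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```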
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2029.000500] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52ffbc2c-ec83-273f-b642-775e697f522f, 'name': SearchDatastore_Task, 'duration_secs': 0.009119} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.001353] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9eb317ee-2c99-45ec-97cc-329678f05837 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.006424] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for the task: (returnval){ [ 2029.006424] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f10f62-5a3d-c3b4-5ae1-d5f86b3ba4ea" [ 2029.006424] env[62508]: _type = "Task" [ 2029.006424] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.013680] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f10f62-5a3d-c3b4-5ae1-d5f86b3ba4ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.125222] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 24091abb-f71f-4528-8fc5-b97725cf079e] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2029.395710] env[62508]: DEBUG nova.compute.utils [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2029.397171] env[62508]: DEBUG nova.compute.manager [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Allocating IP information in the background. 
{{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2029.397660] env[62508]: DEBUG nova.network.neutron [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2029.444284] env[62508]: DEBUG nova.policy [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '66400df997044a7ca8b711be48707221', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '22fda0b7fb924f1d97862bf4124f9c20', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 2029.516588] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52f10f62-5a3d-c3b4-5ae1-d5f86b3ba4ea, 'name': SearchDatastore_Task, 'duration_secs': 0.017964} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.516898] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2029.517171] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a7149c0d-b3ff-4119-a71a-29547cdb2251/a7149c0d-b3ff-4119-a71a-29547cdb2251.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2029.517421] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3e9e3a7-996c-432f-b2b4-10ef186e8060 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.523653] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for the task: (returnval){ [ 2029.523653] env[62508]: value = "task-1776995" [ 2029.523653] env[62508]: _type = "Task" [ 2029.523653] env[62508]: } to complete. 
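The policy line above shows network:attach_external_network being denied for a token that carries only the member and reader roles. The real decision is made by oslo.policy against Nova's policy rules; the snippet below is only an illustration of why those credentials fail a rule that effectively requires admin:

```python
# Illustrative check only; the actual rule lives in Nova's policy files and is
# evaluated by oslo.policy, not by this function.
def check_attach_external_network(credentials):
    return credentials.get("is_admin") or "admin" in credentials.get("roles", [])

creds = {"is_admin": False, "roles": ["member", "reader"]}
assert check_attach_external_network(creds) is False   # matches the "failed" log line
```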
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.531176] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776995, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.628568] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: a058273e-9c68-4d73-9149-ceb60c1c1cda] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2029.723762] env[62508]: DEBUG nova.network.neutron [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Successfully created port: 882a20ab-228f-41be-ad63-db5d912770ee {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2029.900275] env[62508]: DEBUG nova.compute.manager [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Start building block device mappings for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2030.033680] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776995, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462275} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.033939] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] a7149c0d-b3ff-4119-a71a-29547cdb2251/a7149c0d-b3ff-4119-a71a-29547cdb2251.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2030.034174] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2030.034420] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7f154f1c-8448-44e9-81b6-f435bc95b082 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.040838] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for the task: (returnval){ [ 2030.040838] env[62508]: value = "task-1776996" [ 2030.040838] env[62508]: _type = "Task" [ 2030.040838] env[62508]: } to complete. 
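The copy/extend steps above operate on "[datastore1] <dir>/<file>.vmdk" style paths: the cached image vmdk is copied into the instance directory and then extended to the flavor's root disk size. A tiny helper sketch showing how such datastore paths are composed (illustrative only, not Nova's ds_util implementation):

```python
# Compose "[datastore] dir/file" paths like those in the copy/extend traces.
def datastore_path(datastore, *parts):
    return f"[{datastore}] " + "/".join(parts)

image_id = "f81c384b-39f5-44b6-928f-ab9f4bc0a9f7"
instance_id = "a7149c0d-b3ff-4119-a71a-29547cdb2251"
src = datastore_path("datastore1", "devstack-image-cache_base", image_id, f"{image_id}.vmdk")
dst = datastore_path("datastore1", instance_id, f"{instance_id}.vmdk")
print(src, "->", dst)
```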
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.048675] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776996, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.132009] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: f465712f-f65a-4521-90ab-e9f5c5b6de5f] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2030.551016] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776996, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059643} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.551307] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2030.552078] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed60923-7101-4c53-bd10-fd6ac66e59a5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.571717] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] a7149c0d-b3ff-4119-a71a-29547cdb2251/a7149c0d-b3ff-4119-a71a-29547cdb2251.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2030.572257] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdb29131-dc0f-4913-909d-3975aac47d7a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.593190] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for the task: (returnval){ [ 2030.593190] env[62508]: value = "task-1776997" [ 2030.593190] env[62508]: _type = "Task" [ 2030.593190] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.600890] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776997, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.634616] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: bee2cc61-b26c-4d2d-a2aa-ec79b8678e32] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2030.911028] env[62508]: DEBUG nova.compute.manager [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2030.936974] env[62508]: DEBUG nova.virt.hardware [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2030.937260] env[62508]: DEBUG nova.virt.hardware [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2030.937449] env[62508]: DEBUG nova.virt.hardware [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2030.937642] env[62508]: DEBUG nova.virt.hardware [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2030.937797] env[62508]: DEBUG nova.virt.hardware [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2030.937950] env[62508]: DEBUG nova.virt.hardware [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2030.938175] env[62508]: DEBUG nova.virt.hardware [None 
req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2030.938792] env[62508]: DEBUG nova.virt.hardware [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2030.938792] env[62508]: DEBUG nova.virt.hardware [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2030.938792] env[62508]: DEBUG nova.virt.hardware [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2030.938956] env[62508]: DEBUG nova.virt.hardware [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2030.939766] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9d3b5d-a55c-4b6e-834a-b9197efa19be {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.948471] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b2dc66-0f1e-4ec7-8ee6-f0ae5bdb39a6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.104494] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776997, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.134331] env[62508]: DEBUG nova.compute.manager [req-8529b0bf-0d3c-40fc-a833-c99183a6ded9 req-aadafec4-1b5d-492f-94d2-997a5aa575fc service nova] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Received event network-vif-plugged-882a20ab-228f-41be-ad63-db5d912770ee {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2031.134696] env[62508]: DEBUG oslo_concurrency.lockutils [req-8529b0bf-0d3c-40fc-a833-c99183a6ded9 req-aadafec4-1b5d-492f-94d2-997a5aa575fc service nova] Acquiring lock "38cc40f2-e322-414b-a266-f7b5dcbedaf8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2031.134696] env[62508]: DEBUG oslo_concurrency.lockutils [req-8529b0bf-0d3c-40fc-a833-c99183a6ded9 req-aadafec4-1b5d-492f-94d2-997a5aa575fc service nova] Lock "38cc40f2-e322-414b-a266-f7b5dcbedaf8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.134892] env[62508]: DEBUG oslo_concurrency.lockutils [req-8529b0bf-0d3c-40fc-a833-c99183a6ded9 req-aadafec4-1b5d-492f-94d2-997a5aa575fc service nova] Lock "38cc40f2-e322-414b-a266-f7b5dcbedaf8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.134965] env[62508]: DEBUG nova.compute.manager [req-8529b0bf-0d3c-40fc-a833-c99183a6ded9 req-aadafec4-1b5d-492f-94d2-997a5aa575fc service nova] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] No waiting events found dispatching network-vif-plugged-882a20ab-228f-41be-ad63-db5d912770ee {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2031.135150] env[62508]: WARNING nova.compute.manager [req-8529b0bf-0d3c-40fc-a833-c99183a6ded9 req-aadafec4-1b5d-492f-94d2-997a5aa575fc service nova] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Received unexpected event network-vif-plugged-882a20ab-228f-41be-ad63-db5d912770ee for instance with vm_state building and task_state spawning. [ 2031.138141] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 145306d7-f0e8-46c0-b2ab-1c41c208f976] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2031.225465] env[62508]: DEBUG nova.network.neutron [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Successfully updated port: 882a20ab-228f-41be-ad63-db5d912770ee {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2031.608708] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776997, 'name': ReconfigVM_Task} progress is 14%. 
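The network-vif-plugged lines show Neutron's external event arriving before the spawn has registered a waiter for it, hence the "Received unexpected event" warning. A hedged sketch of that register-then-pop pattern follows; this is not Nova's InstanceEvents implementation, just an outline of the idea:

```python
# Register interest in an event, then pop it when the notification arrives; if
# nobody registered, the pop returns None and the event is treated as unexpected.
import threading
from collections import defaultdict

class InstanceEvents:
    def __init__(self):
        self._events = defaultdict(dict)     # instance_uuid -> {event_key: Event}
        self._lock = threading.Lock()

    def prepare(self, instance_uuid, event_key):
        with self._lock:
            ev = threading.Event()
            self._events[instance_uuid][event_key] = ev
            return ev

    def pop(self, instance_uuid, event_key):
        with self._lock:
            return self._events[instance_uuid].pop(event_key, None)

events = InstanceEvents()
waiter = events.pop("38cc40f2-e322-414b-a266-f7b5dcbedaf8",
                    "network-vif-plugged-882a20ab-228f-41be-ad63-db5d912770ee")
if waiter is None:
    print("No waiting events found; unexpected event")   # mirrors the WARNING above
```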
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.641658] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: deee2c81-4d2c-47d3-aae6-ef829d59c644] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2031.728606] env[62508]: DEBUG oslo_concurrency.lockutils [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "refresh_cache-38cc40f2-e322-414b-a266-f7b5dcbedaf8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2031.728882] env[62508]: DEBUG oslo_concurrency.lockutils [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquired lock "refresh_cache-38cc40f2-e322-414b-a266-f7b5dcbedaf8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2031.729147] env[62508]: DEBUG nova.network.neutron [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2032.105500] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776997, 'name': ReconfigVM_Task, 'duration_secs': 1.285408} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2032.107068] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Reconfigured VM instance instance-00000079 to attach disk [datastore1] a7149c0d-b3ff-4119-a71a-29547cdb2251/a7149c0d-b3ff-4119-a71a-29547cdb2251.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2032.107068] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9d9d1d82-8fd0-4288-ae43-9751004b0ef9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.112898] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for the task: (returnval){ [ 2032.112898] env[62508]: value = "task-1776998" [ 2032.112898] env[62508]: _type = "Task" [ 2032.112898] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.120618] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776998, 'name': Rename_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.146502] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: a8ce13c4-ea95-4343-8eab-8a0dafbf0e03] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2032.263273] env[62508]: DEBUG nova.network.neutron [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2032.392225] env[62508]: DEBUG nova.network.neutron [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Updating instance_info_cache with network_info: [{"id": "882a20ab-228f-41be-ad63-db5d912770ee", "address": "fa:16:3e:16:dc:8b", "network": {"id": "f51db9ea-4de8-40f0-9ace-aac05e474fd2", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-866700916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fda0b7fb924f1d97862bf4124f9c20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap882a20ab-22", "ovs_interfaceid": "882a20ab-228f-41be-ad63-db5d912770ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2032.623193] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776998, 'name': Rename_Task, 'duration_secs': 0.215447} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2032.623423] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2032.623688] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5271063c-8bcf-4a03-962e-c58d1060878c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.629120] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for the task: (returnval){ [ 2032.629120] env[62508]: value = "task-1776999" [ 2032.629120] env[62508]: _type = "Task" [ 2032.629120] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.636186] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776999, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.650727] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: aa7b27b5-7b40-4de3-a304-5e7cd4ad1b1a] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2032.895027] env[62508]: DEBUG oslo_concurrency.lockutils [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Releasing lock "refresh_cache-38cc40f2-e322-414b-a266-f7b5dcbedaf8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2032.895357] env[62508]: DEBUG nova.compute.manager [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Instance network_info: |[{"id": "882a20ab-228f-41be-ad63-db5d912770ee", "address": "fa:16:3e:16:dc:8b", "network": {"id": "f51db9ea-4de8-40f0-9ace-aac05e474fd2", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-866700916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fda0b7fb924f1d97862bf4124f9c20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap882a20ab-22", "ovs_interfaceid": "882a20ab-228f-41be-ad63-db5d912770ee", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2032.895876] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:dc:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39a4aca0-934b-4a91-8779-6a4360c3f967', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '882a20ab-228f-41be-ad63-db5d912770ee', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2032.904326] env[62508]: DEBUG oslo.service.loopingcall [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2032.904661] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2032.904984] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-26663f5a-52ad-4fd6-8eee-e814211debbe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.926224] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2032.926224] env[62508]: value = "task-1777000" [ 2032.926224] env[62508]: _type = "Task" [ 2032.926224] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.934584] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777000, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.141082] env[62508]: DEBUG oslo_vmware.api [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1776999, 'name': PowerOnVM_Task, 'duration_secs': 0.484679} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.141559] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2033.141897] env[62508]: DEBUG nova.compute.manager [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2033.143040] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c1eba0-4e33-4cf1-819c-9ae77eb01b90 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.154399] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: a10a4217-ae46-4f00-9ba1-cdf74f44ec7b] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2033.161014] env[62508]: DEBUG nova.compute.manager [req-73122fed-d10a-47ba-b721-e3de55cb38b7 req-6f27b6f5-a6e6-4f7a-83f6-91f509fff920 service nova] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Received event network-changed-882a20ab-228f-41be-ad63-db5d912770ee {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2033.161260] env[62508]: DEBUG nova.compute.manager [req-73122fed-d10a-47ba-b721-e3de55cb38b7 req-6f27b6f5-a6e6-4f7a-83f6-91f509fff920 service nova] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Refreshing instance network info cache due to event network-changed-882a20ab-228f-41be-ad63-db5d912770ee. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2033.161573] env[62508]: DEBUG oslo_concurrency.lockutils [req-73122fed-d10a-47ba-b721-e3de55cb38b7 req-6f27b6f5-a6e6-4f7a-83f6-91f509fff920 service nova] Acquiring lock "refresh_cache-38cc40f2-e322-414b-a266-f7b5dcbedaf8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2033.161801] env[62508]: DEBUG oslo_concurrency.lockutils [req-73122fed-d10a-47ba-b721-e3de55cb38b7 req-6f27b6f5-a6e6-4f7a-83f6-91f509fff920 service nova] Acquired lock "refresh_cache-38cc40f2-e322-414b-a266-f7b5dcbedaf8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2033.162068] env[62508]: DEBUG nova.network.neutron [req-73122fed-d10a-47ba-b721-e3de55cb38b7 req-6f27b6f5-a6e6-4f7a-83f6-91f509fff920 service nova] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Refreshing network info cache for port 882a20ab-228f-41be-ad63-db5d912770ee {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2033.436391] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777000, 'name': CreateVM_Task, 'duration_secs': 0.354856} completed successfully. 
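After PowerOnVM_Task succeeds, the "Checking state" step reads the VM's runtime power state from vCenter and maps it onto Nova's power states. A minimal mapping sketch; the integer constants are local stand-ins rather than imports from nova.compute.power_state, and the lookup helper is a placeholder:

```python
# Map vSphere runtime.powerState names onto Nova-style power state constants.
RUNNING, SHUTDOWN, SUSPENDED, NOSTATE = 1, 4, 7, 0

VMWARE_POWER_STATES = {
    "poweredOn": RUNNING,
    "poweredOff": SHUTDOWN,
    "suspended": SUSPENDED,
}

def get_power_state(vmware_power_state):
    return VMWARE_POWER_STATES.get(vmware_power_state, NOSTATE)

print(get_power_state("poweredOn"))   # 1, i.e. running
```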
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.436624] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2033.437187] env[62508]: DEBUG oslo_concurrency.lockutils [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2033.437356] env[62508]: DEBUG oslo_concurrency.lockutils [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2033.437729] env[62508]: DEBUG oslo_concurrency.lockutils [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2033.437981] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78cf4c82-459b-41d2-b56a-96eff26a35fc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.442340] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2033.442340] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524e7a24-c63b-3bf7-8bc1-b03e2b71c333" [ 2033.442340] env[62508]: _type = "Task" [ 2033.442340] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.450357] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524e7a24-c63b-3bf7-8bc1-b03e2b71c333, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.662015] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 7015b188-17ca-45ec-8fe8-f80ef0f9cb0a] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2033.667193] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.667435] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2033.667608] env[62508]: DEBUG nova.objects.instance [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62508) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2033.900290] env[62508]: DEBUG nova.network.neutron [req-73122fed-d10a-47ba-b721-e3de55cb38b7 req-6f27b6f5-a6e6-4f7a-83f6-91f509fff920 service nova] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Updated VIF entry in instance network info cache for port 882a20ab-228f-41be-ad63-db5d912770ee. 
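The recurring "Instance has had 0 of 5 cleanup attempts" messages come from a periodic task that retries leftover deletion work a bounded number of times before giving up. A bounded-retry sketch of that idea; the counters and helpers are illustrative, not the compute manager's code:

```python
# Retry pending cleanup work at most MAX_CLEANUP_ATTEMPTS times per instance.
MAX_CLEANUP_ATTEMPTS = 5

def run_pending_deletes(instances, cleanup):
    for inst in instances:
        attempts = inst.get("cleanup_attempts", 0)
        if attempts >= MAX_CLEANUP_ATTEMPTS:
            continue                      # give up after the fifth attempt
        print(f"Instance has had {attempts} of {MAX_CLEANUP_ATTEMPTS} cleanup attempts")
        try:
            cleanup(inst)
        except Exception:
            inst["cleanup_attempts"] = attempts + 1   # retry on the next periodic run
```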
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2033.900669] env[62508]: DEBUG nova.network.neutron [req-73122fed-d10a-47ba-b721-e3de55cb38b7 req-6f27b6f5-a6e6-4f7a-83f6-91f509fff920 service nova] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Updating instance_info_cache with network_info: [{"id": "882a20ab-228f-41be-ad63-db5d912770ee", "address": "fa:16:3e:16:dc:8b", "network": {"id": "f51db9ea-4de8-40f0-9ace-aac05e474fd2", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-866700916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fda0b7fb924f1d97862bf4124f9c20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap882a20ab-22", "ovs_interfaceid": "882a20ab-228f-41be-ad63-db5d912770ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2033.951972] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]524e7a24-c63b-3bf7-8bc1-b03e2b71c333, 'name': SearchDatastore_Task, 'duration_secs': 0.010814} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.952222] env[62508]: DEBUG oslo_concurrency.lockutils [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2033.952453] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2033.952695] env[62508]: DEBUG oslo_concurrency.lockutils [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2033.952842] env[62508]: DEBUG oslo_concurrency.lockutils [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2033.953060] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2033.953303] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c3e8711-f95f-4b60-804e-b71f7a193f31 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.961307] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2033.961481] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2033.962172] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d511fbf-9314-471b-af65-6966daf7ddcd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.967300] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2033.967300] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5230dd0f-e90f-d12e-6df4-e87ed0c6640e" [ 2033.967300] env[62508]: _type = "Task" [ 2033.967300] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.975816] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5230dd0f-e90f-d12e-6df4-e87ed0c6640e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.168156] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: e478855d-e9c7-4abc-8e22-a4b2eb0c7310] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2034.403257] env[62508]: DEBUG oslo_concurrency.lockutils [req-73122fed-d10a-47ba-b721-e3de55cb38b7 req-6f27b6f5-a6e6-4f7a-83f6-91f509fff920 service nova] Releasing lock "refresh_cache-38cc40f2-e322-414b-a266-f7b5dcbedaf8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2034.477910] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5230dd0f-e90f-d12e-6df4-e87ed0c6640e, 'name': SearchDatastore_Task, 'duration_secs': 0.008854} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.478633] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de614120-27c9-4a9d-9d8f-f1220b43796f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.484167] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2034.484167] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c9548d-2c4b-6240-90af-796a3a07b2de" [ 2034.484167] env[62508]: _type = "Task" [ 2034.484167] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.491593] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c9548d-2c4b-6240-90af-796a3a07b2de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.674506] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 95a289ac-3178-45ea-80d2-905b9af54f3c] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2034.677726] env[62508]: DEBUG oslo_concurrency.lockutils [None req-4e5df8a7-bb48-4bd9-8154-580de599a648 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2034.721554] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Acquiring lock "a7149c0d-b3ff-4119-a71a-29547cdb2251" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2034.721814] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Lock "a7149c0d-b3ff-4119-a71a-29547cdb2251" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2034.722038] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Acquiring lock "a7149c0d-b3ff-4119-a71a-29547cdb2251-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2034.722242] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Lock "a7149c0d-b3ff-4119-a71a-29547cdb2251-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2034.722413] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Lock "a7149c0d-b3ff-4119-a71a-29547cdb2251-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2034.724409] 
env[62508]: INFO nova.compute.manager [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Terminating instance [ 2034.725945] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Acquiring lock "refresh_cache-a7149c0d-b3ff-4119-a71a-29547cdb2251" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2034.726135] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Acquired lock "refresh_cache-a7149c0d-b3ff-4119-a71a-29547cdb2251" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2034.726308] env[62508]: DEBUG nova.network.neutron [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2034.994809] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52c9548d-2c4b-6240-90af-796a3a07b2de, 'name': SearchDatastore_Task, 'duration_secs': 0.009436} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.994809] env[62508]: DEBUG oslo_concurrency.lockutils [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2034.994999] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 38cc40f2-e322-414b-a266-f7b5dcbedaf8/38cc40f2-e322-414b-a266-f7b5dcbedaf8.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2034.996220] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d62cc125-4fae-4132-a4d6-95c7d9e44f63 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.001824] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2035.001824] env[62508]: value = "task-1777001" [ 2035.001824] env[62508]: _type = "Task" [ 2035.001824] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.009732] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777001, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.179718] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: de69dbf0-86f1-4b05-a9db-8b9afaabe49c] Instance has had 0 of 5 cleanup attempts {{(pid=62508) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2035.245453] env[62508]: DEBUG nova.network.neutron [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2035.309312] env[62508]: DEBUG nova.network.neutron [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2035.512359] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777001, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.446847} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.512628] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 38cc40f2-e322-414b-a266-f7b5dcbedaf8/38cc40f2-e322-414b-a266-f7b5dcbedaf8.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2035.512845] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2035.513130] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ea729d5-c8e4-4fb3-95e2-6faa1bc440fc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.519866] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2035.519866] env[62508]: value = "task-1777002" [ 2035.519866] env[62508]: _type = "Task" [ 2035.519866] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.527505] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777002, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.812582] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Releasing lock "refresh_cache-a7149c0d-b3ff-4119-a71a-29547cdb2251" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2035.813059] env[62508]: DEBUG nova.compute.manager [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2035.813274] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2035.814182] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6dd8f4-8796-4a75-b874-0cbc1b677ade {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.821604] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2035.821834] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1087bedf-44f8-4e54-83b5-b5760868415d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.828299] env[62508]: DEBUG oslo_vmware.api [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for the task: (returnval){ [ 2035.828299] env[62508]: value = "task-1777003" [ 2035.828299] env[62508]: _type = "Task" [ 2035.828299] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.835709] env[62508]: DEBUG oslo_vmware.api [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1777003, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.030199] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777002, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062125} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.030470] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2036.031256] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5511ce55-e39f-490a-aacd-e046cf449608 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.053144] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 38cc40f2-e322-414b-a266-f7b5dcbedaf8/38cc40f2-e322-414b-a266-f7b5dcbedaf8.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2036.053329] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23c93fee-85bc-495a-85c0-e9ec6dd8451c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.072528] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2036.072528] env[62508]: value = "task-1777004" [ 2036.072528] env[62508]: _type = "Task" [ 2036.072528] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.079901] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777004, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.337972] env[62508]: DEBUG oslo_vmware.api [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1777003, 'name': PowerOffVM_Task, 'duration_secs': 0.110946} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.338374] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2036.338437] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2036.338661] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9f88bf49-5256-4c51-9df2-b4135d53b605 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.365671] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2036.365922] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2036.366139] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Deleting the datastore file [datastore1] a7149c0d-b3ff-4119-a71a-29547cdb2251 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2036.366392] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6fc94ec1-ce2a-4215-9609-7a38544cb793 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.372978] env[62508]: DEBUG oslo_vmware.api [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for the task: (returnval){ [ 2036.372978] env[62508]: value = "task-1777006" [ 2036.372978] env[62508]: _type = "Task" [ 2036.372978] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.380526] env[62508]: DEBUG oslo_vmware.api [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1777006, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.581761] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777004, 'name': ReconfigVM_Task, 'duration_secs': 0.289507} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.582140] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 38cc40f2-e322-414b-a266-f7b5dcbedaf8/38cc40f2-e322-414b-a266-f7b5dcbedaf8.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2036.582771] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-860d3355-1e6f-4ba4-94b8-204f2ba2a369 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.588623] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2036.588623] env[62508]: value = "task-1777007" [ 2036.588623] env[62508]: _type = "Task" [ 2036.588623] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.597066] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777007, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.882685] env[62508]: DEBUG oslo_vmware.api [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Task: {'id': task-1777006, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095785} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.882909] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2036.883119] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2036.883276] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2036.883447] env[62508]: INFO nova.compute.manager [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Took 1.07 seconds to destroy the instance on the hypervisor. [ 2036.883698] env[62508]: DEBUG oslo.service.loopingcall [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2036.883892] env[62508]: DEBUG nova.compute.manager [-] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2036.883985] env[62508]: DEBUG nova.network.neutron [-] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2036.898593] env[62508]: DEBUG nova.network.neutron [-] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Instance cache missing network info. {{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2037.098135] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777007, 'name': Rename_Task, 'duration_secs': 0.13881} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.098428] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2037.098661] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-99726462-c6ca-4b4f-8f13-ec9b54a87025 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.104981] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2037.104981] env[62508]: value = "task-1777008" [ 2037.104981] env[62508]: _type = "Task" [ 2037.104981] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.111855] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777008, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.401470] env[62508]: DEBUG nova.network.neutron [-] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2037.614922] env[62508]: DEBUG oslo_vmware.api [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777008, 'name': PowerOnVM_Task, 'duration_secs': 0.449902} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.615197] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2037.615402] env[62508]: INFO nova.compute.manager [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Took 6.70 seconds to spawn the instance on the hypervisor. 
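[editor's note] The wait_for_task / _poll_task pairs traced above (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) all follow the same pattern: a vCenter task is submitted, then its state is polled on a fixed interval until it reports success or error, emitting the "progress is N%" and "completed successfully" messages seen here. The sketch below is an illustrative reconstruction of that polling loop using oslo.service's FixedIntervalLoopingCall; it is not Nova's or oslo.vmware's actual implementation, and the poll_task callable, its return fields, and the 0.5s interval are assumptions made for the example.

    # Illustrative sketch (not Nova's real code) of the task-polling pattern
    # visible in this log. Assumption: poll_task(task_ref) returns an object
    # with .state in ('queued', 'running', 'success', 'error'), .progress,
    # and .error.
    from oslo_service import loopingcall


    def wait_for_task(poll_task, task_ref, interval=0.5):
        """Poll a vCenter task until it finishes, mirroring the log's
        'Task: {...} progress is N%' / 'completed successfully' lines."""

        def _poll():
            info = poll_task(task_ref)
            if info.state == 'success':
                # Stop the looping call and hand the task info back to wait().
                raise loopingcall.LoopingCallDone(info)
            if info.state == 'error':
                raise RuntimeError('task %s failed: %s' % (task_ref, info.error))
            # Otherwise keep polling; the caller sees periodic progress.

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        try:
            # start() returns an event; wait() yields the LoopingCallDone value.
            return timer.start(interval=interval).wait()
        finally:
            timer.stop()

The choice of a fixed-interval loop (rather than a tight busy-wait) is what produces the evenly spaced progress entries in the timestamps above; the actual poll interval in this deployment is not recoverable from the log and is left as an assumption.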
[ 2037.615581] env[62508]: DEBUG nova.compute.manager [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2037.616389] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2eaea13-8881-46e6-90db-846fc1de1c75 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.683673] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2037.904051] env[62508]: INFO nova.compute.manager [-] [instance: a7149c0d-b3ff-4119-a71a-29547cdb2251] Took 1.02 seconds to deallocate network for instance. [ 2038.132760] env[62508]: INFO nova.compute.manager [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Took 11.39 seconds to build instance. [ 2038.410180] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2038.410517] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2038.410736] env[62508]: DEBUG nova.objects.instance [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Lazy-loading 'resources' on Instance uuid a7149c0d-b3ff-4119-a71a-29547cdb2251 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2038.634474] env[62508]: DEBUG oslo_concurrency.lockutils [None req-994679f4-3d69-4297-9558-4d8d956708a3 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "38cc40f2-e322-414b-a266-f7b5dcbedaf8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.900s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.819876] env[62508]: DEBUG nova.compute.manager [req-282ebf3f-9c84-44e3-a816-fac28e62837b req-4b4040df-aa6b-4c60-865e-8c4a235314d0 service nova] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Received event network-changed-882a20ab-228f-41be-ad63-db5d912770ee {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2038.820102] env[62508]: DEBUG nova.compute.manager [req-282ebf3f-9c84-44e3-a816-fac28e62837b req-4b4040df-aa6b-4c60-865e-8c4a235314d0 service nova] [instance: 
38cc40f2-e322-414b-a266-f7b5dcbedaf8] Refreshing instance network info cache due to event network-changed-882a20ab-228f-41be-ad63-db5d912770ee. {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2038.820357] env[62508]: DEBUG oslo_concurrency.lockutils [req-282ebf3f-9c84-44e3-a816-fac28e62837b req-4b4040df-aa6b-4c60-865e-8c4a235314d0 service nova] Acquiring lock "refresh_cache-38cc40f2-e322-414b-a266-f7b5dcbedaf8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2038.820510] env[62508]: DEBUG oslo_concurrency.lockutils [req-282ebf3f-9c84-44e3-a816-fac28e62837b req-4b4040df-aa6b-4c60-865e-8c4a235314d0 service nova] Acquired lock "refresh_cache-38cc40f2-e322-414b-a266-f7b5dcbedaf8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2038.820674] env[62508]: DEBUG nova.network.neutron [req-282ebf3f-9c84-44e3-a816-fac28e62837b req-4b4040df-aa6b-4c60-865e-8c4a235314d0 service nova] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Refreshing network info cache for port 882a20ab-228f-41be-ad63-db5d912770ee {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2038.976133] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-625757c0-9e7a-45ec-88da-0385f7453e36 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.984168] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b974050-2703-4473-a1a3-a1f59f880397 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.014143] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad60e37-0a71-4d82-be2a-edcafa2c99b2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.021096] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f205abf9-758d-40db-8f5c-29f32b1df8ff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.033854] env[62508]: DEBUG nova.compute.provider_tree [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2039.533175] env[62508]: DEBUG nova.network.neutron [req-282ebf3f-9c84-44e3-a816-fac28e62837b req-4b4040df-aa6b-4c60-865e-8c4a235314d0 service nova] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Updated VIF entry in instance network info cache for port 882a20ab-228f-41be-ad63-db5d912770ee. 
{{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2039.533549] env[62508]: DEBUG nova.network.neutron [req-282ebf3f-9c84-44e3-a816-fac28e62837b req-4b4040df-aa6b-4c60-865e-8c4a235314d0 service nova] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Updating instance_info_cache with network_info: [{"id": "882a20ab-228f-41be-ad63-db5d912770ee", "address": "fa:16:3e:16:dc:8b", "network": {"id": "f51db9ea-4de8-40f0-9ace-aac05e474fd2", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-866700916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fda0b7fb924f1d97862bf4124f9c20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap882a20ab-22", "ovs_interfaceid": "882a20ab-228f-41be-ad63-db5d912770ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2039.554078] env[62508]: ERROR nova.scheduler.client.report [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] [req-85ffe8ba-2185-45ae-9683-8392325be4b2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-85ffe8ba-2185-45ae-9683-8392325be4b2"}]} [ 2039.568989] env[62508]: DEBUG nova.scheduler.client.report [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2039.582132] env[62508]: DEBUG nova.scheduler.client.report [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2039.582361] env[62508]: DEBUG nova.compute.provider_tree [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2039.594070] env[62508]: DEBUG nova.scheduler.client.report [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2039.612317] env[62508]: DEBUG nova.scheduler.client.report [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2039.662149] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2086b35-460e-46eb-84ef-48d66e171538 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.669863] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f82e6b-78a9-48de-8565-4de7518ee0f7 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.699365] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9919509-55e7-4a66-864d-f6e1b7266572 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.706550] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de6be10-914b-48cc-94f3-8282dd83de0d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.720566] env[62508]: DEBUG nova.compute.provider_tree [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2040.036247] env[62508]: DEBUG oslo_concurrency.lockutils [req-282ebf3f-9c84-44e3-a816-fac28e62837b req-4b4040df-aa6b-4c60-865e-8c4a235314d0 service nova] Releasing lock "refresh_cache-38cc40f2-e322-414b-a266-f7b5dcbedaf8" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2040.253848] env[62508]: DEBUG nova.scheduler.client.report [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 199 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2040.254202] env[62508]: DEBUG nova.compute.provider_tree [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 199 to 200 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2040.254439] env[62508]: DEBUG nova.compute.provider_tree [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2040.759603] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.349s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2040.777441] env[62508]: INFO nova.scheduler.client.report [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Deleted allocations for instance a7149c0d-b3ff-4119-a71a-29547cdb2251 [ 2041.286106] env[62508]: DEBUG oslo_concurrency.lockutils [None req-9d3c823e-c36d-48ed-9c76-55b74eef7489 tempest-ServerShowV254Test-2100148288 tempest-ServerShowV254Test-2100148288-project-member] Lock "a7149c0d-b3ff-4119-a71a-29547cdb2251" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.564s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2052.070815] env[62508]: INFO nova.compute.manager [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Rebuilding instance [ 2052.109946] env[62508]: DEBUG nova.compute.manager [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2052.110833] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-164b7716-4aec-4733-856d-f44146d16df2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.621536] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2052.621837] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-90a5fdf1-7cab-40e6-b609-f5006965b901 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.630303] env[62508]: DEBUG oslo_vmware.api [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Waiting for the task: (returnval){ [ 2052.630303] env[62508]: value = "task-1777009" [ 2052.630303] env[62508]: _type = "Task" [ 2052.630303] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2052.638834] env[62508]: DEBUG oslo_vmware.api [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Task: {'id': task-1777009, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.140880] env[62508]: DEBUG oslo_vmware.api [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Task: {'id': task-1777009, 'name': PowerOffVM_Task, 'duration_secs': 0.259516} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2053.141275] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2053.141756] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2053.141997] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d4cab3a2-800d-4bfa-95b1-929b57a88320 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.148133] env[62508]: DEBUG oslo_vmware.api [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Waiting for the task: (returnval){ [ 2053.148133] env[62508]: value = "task-1777010" [ 2053.148133] env[62508]: _type = "Task" [ 2053.148133] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2053.154960] env[62508]: DEBUG oslo_vmware.api [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Task: {'id': task-1777010, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.659587] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] VM already powered off {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2053.659796] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Volume detach. 
Driver type: vmdk {{(pid=62508) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2053.660059] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368861', 'volume_id': '614b8ae6-4410-4156-9cd5-c2eadcc0c105', 'name': 'volume-614b8ae6-4410-4156-9cd5-c2eadcc0c105', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '001e2821-2e78-4b15-8bb4-0dddff544913', 'attached_at': '', 'detached_at': '', 'volume_id': '614b8ae6-4410-4156-9cd5-c2eadcc0c105', 'serial': '614b8ae6-4410-4156-9cd5-c2eadcc0c105'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2053.660768] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7bd4bdf-8323-4492-8936-d2118b99c0ff {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.678552] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1511be76-2315-4d33-b1bb-a4bcc667d2fe {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.684840] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-910da8cd-c17c-4cb3-a345-b69c64dca59d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.702102] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3dc917-e76b-4b61-981a-f73b8aa04a3c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.716966] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] The volume has not been displaced from its original location: [datastore1] volume-614b8ae6-4410-4156-9cd5-c2eadcc0c105/volume-614b8ae6-4410-4156-9cd5-c2eadcc0c105.vmdk. No consolidation needed. 
{{(pid=62508) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2053.722224] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Reconfiguring VM instance instance-00000078 to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2053.722599] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-655c60b3-f615-4bae-a893-302d609c8c9d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.740074] env[62508]: DEBUG oslo_vmware.api [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Waiting for the task: (returnval){ [ 2053.740074] env[62508]: value = "task-1777011" [ 2053.740074] env[62508]: _type = "Task" [ 2053.740074] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2053.747383] env[62508]: DEBUG oslo_vmware.api [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Task: {'id': task-1777011, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.250157] env[62508]: DEBUG oslo_vmware.api [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Task: {'id': task-1777011, 'name': ReconfigVM_Task, 'duration_secs': 0.182157} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2054.250519] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Reconfigured VM instance instance-00000078 to detach disk 2000 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2054.254995] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dfd82f58-85c7-4b3b-bf4c-c2a3f82c9a0e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.269744] env[62508]: DEBUG oslo_vmware.api [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Waiting for the task: (returnval){ [ 2054.269744] env[62508]: value = "task-1777012" [ 2054.269744] env[62508]: _type = "Task" [ 2054.269744] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.277093] env[62508]: DEBUG oslo_vmware.api [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Task: {'id': task-1777012, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.779333] env[62508]: DEBUG oslo_vmware.api [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Task: {'id': task-1777012, 'name': ReconfigVM_Task, 'duration_secs': 0.103422} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2054.779581] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368861', 'volume_id': '614b8ae6-4410-4156-9cd5-c2eadcc0c105', 'name': 'volume-614b8ae6-4410-4156-9cd5-c2eadcc0c105', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '001e2821-2e78-4b15-8bb4-0dddff544913', 'attached_at': '', 'detached_at': '', 'volume_id': '614b8ae6-4410-4156-9cd5-c2eadcc0c105', 'serial': '614b8ae6-4410-4156-9cd5-c2eadcc0c105'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2054.779857] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2054.780613] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ed5379-df8a-4037-bc3d-b3464d63d54c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.787203] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2054.787418] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a80b5808-ea82-42b7-bc67-356fe9cb093a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.861432] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2054.861714] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2054.861823] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 
tempest-ServerActionsV293TestJSON-777634103-project-member] Deleting the datastore file [datastore1] 001e2821-2e78-4b15-8bb4-0dddff544913 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2054.862097] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-80726388-28ce-4c04-939d-a4851adfd84f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.868285] env[62508]: DEBUG oslo_vmware.api [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Waiting for the task: (returnval){ [ 2054.868285] env[62508]: value = "task-1777014" [ 2054.868285] env[62508]: _type = "Task" [ 2054.868285] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.875843] env[62508]: DEBUG oslo_vmware.api [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Task: {'id': task-1777014, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.379961] env[62508]: DEBUG oslo_vmware.api [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Task: {'id': task-1777014, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084437} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2055.380426] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2055.380426] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2055.380568] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2055.433254] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Volume detach. 
Driver type: vmdk {{(pid=62508) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2055.433578] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6bdf16aa-ccf5-4820-a47b-504892dbf882 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.442800] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba5d990-cab2-4372-b36a-e23461c4dbea {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.468998] env[62508]: ERROR nova.compute.manager [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Failed to detach volume 614b8ae6-4410-4156-9cd5-c2eadcc0c105 from /dev/sda: nova.exception.InstanceNotFound: Instance 001e2821-2e78-4b15-8bb4-0dddff544913 could not be found. [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Traceback (most recent call last): [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/compute/manager.py", line 4143, in _do_rebuild_instance [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] self.driver.rebuild(**kwargs) [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] raise NotImplementedError() [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] NotImplementedError [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] During handling of the above exception, another exception occurred: [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Traceback (most recent call last): [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/compute/manager.py", line 3566, in _detach_root_volume [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] self.driver.detach_volume(context, old_connection_info, [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 559, in detach_volume [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] return self._volumeops.detach_volume(connection_info, instance) [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] self._detach_volume_vmdk(connection_info, instance) [ 2055.468998] env[62508]: ERROR nova.compute.manager 
[instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] stable_ref.fetch_moref(session) [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] raise exception.InstanceNotFound(instance_id=self._uuid) [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] nova.exception.InstanceNotFound: Instance 001e2821-2e78-4b15-8bb4-0dddff544913 could not be found. [ 2055.468998] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] [ 2055.652690] env[62508]: DEBUG nova.compute.utils [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Build of instance 001e2821-2e78-4b15-8bb4-0dddff544913 aborted: Failed to rebuild volume backed instance. {{(pid=62508) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2055.656364] env[62508]: ERROR nova.compute.manager [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 001e2821-2e78-4b15-8bb4-0dddff544913 aborted: Failed to rebuild volume backed instance. 
[ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Traceback (most recent call last): [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/compute/manager.py", line 4143, in _do_rebuild_instance [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] self.driver.rebuild(**kwargs) [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] raise NotImplementedError() [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] NotImplementedError [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] During handling of the above exception, another exception occurred: [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Traceback (most recent call last): [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/compute/manager.py", line 3601, in _rebuild_volume_backed_instance [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] self._detach_root_volume(context, instance, root_bdm) [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/compute/manager.py", line 3580, in _detach_root_volume [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] with excutils.save_and_reraise_exception(): [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] self.force_reraise() [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] raise self.value [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/compute/manager.py", line 3566, in _detach_root_volume [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] self.driver.detach_volume(context, old_connection_info, [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 559, in detach_volume [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] return self._volumeops.detach_volume(connection_info, instance) [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] self._detach_volume_vmdk(connection_info, instance) [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] stable_ref.fetch_moref(session) [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] raise exception.InstanceNotFound(instance_id=self._uuid) [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] nova.exception.InstanceNotFound: Instance 001e2821-2e78-4b15-8bb4-0dddff544913 could not be found. [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] During handling of the above exception, another exception occurred: [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Traceback (most recent call last): [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/compute/manager.py", line 10866, in _error_out_instance_on_exception [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] yield [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/compute/manager.py", line 3869, in rebuild_instance [ 2055.656364] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] self._do_rebuild_instance_with_claim( [ 2055.657615] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/compute/manager.py", line 3955, in _do_rebuild_instance_with_claim [ 2055.657615] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] self._do_rebuild_instance( [ 2055.657615] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/compute/manager.py", line 4147, in _do_rebuild_instance [ 2055.657615] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] self._rebuild_default_impl(**kwargs) [ 2055.657615] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/compute/manager.py", line 3724, in _rebuild_default_impl [ 2055.657615] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] 
self._rebuild_volume_backed_instance( [ 2055.657615] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] File "/opt/stack/nova/nova/compute/manager.py", line 3616, in _rebuild_volume_backed_instance [ 2055.657615] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] raise exception.BuildAbortException( [ 2055.657615] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] nova.exception.BuildAbortException: Build of instance 001e2821-2e78-4b15-8bb4-0dddff544913 aborted: Failed to rebuild volume backed instance. [ 2055.657615] env[62508]: ERROR nova.compute.manager [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] [ 2057.672060] env[62508]: DEBUG oslo_concurrency.lockutils [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2057.672060] env[62508]: DEBUG oslo_concurrency.lockutils [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2057.707785] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be183a28-c1b9-460e-baff-437cc2df4a06 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.714998] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87edcf6d-6450-446a-8f19-75c7c0eb93bd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.745464] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da92b88-2c64-474c-b295-cdf077061c61 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.752607] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee252976-7efd-4cb0-94d5-ed2fdc8c6d59 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.765304] env[62508]: DEBUG nova.compute.provider_tree [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2057.880979] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f 
tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Acquiring lock "001e2821-2e78-4b15-8bb4-0dddff544913" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2057.881277] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Lock "001e2821-2e78-4b15-8bb4-0dddff544913" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2057.881500] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Acquiring lock "001e2821-2e78-4b15-8bb4-0dddff544913-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2057.881681] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Lock "001e2821-2e78-4b15-8bb4-0dddff544913-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2057.881853] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Lock "001e2821-2e78-4b15-8bb4-0dddff544913-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2057.884417] env[62508]: INFO nova.compute.manager [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Terminating instance [ 2057.886383] env[62508]: DEBUG nova.compute.manager [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Start destroying the instance on the hypervisor. 
{{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2057.886664] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c3e6aa56-b567-4f62-9a39-9336f460d353 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.895922] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251f034f-00d2-4682-9524-a5e5456d0320 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.920585] env[62508]: WARNING nova.virt.vmwareapi.driver [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 001e2821-2e78-4b15-8bb4-0dddff544913 could not be found. [ 2057.920771] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2057.921047] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-870e41f0-5211-40e0-b2b3-a7c9eab19bae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.928679] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d163e218-a00d-4d25-9b35-3c5a9bf1c33b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.952044] env[62508]: WARNING nova.virt.vmwareapi.vmops [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 001e2821-2e78-4b15-8bb4-0dddff544913 could not be found. [ 2057.952238] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2057.952415] env[62508]: INFO nova.compute.manager [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Took 0.07 seconds to destroy the instance on the hypervisor. [ 2057.952647] env[62508]: DEBUG oslo.service.loopingcall [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2057.952880] env[62508]: DEBUG nova.compute.manager [-] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2057.953035] env[62508]: DEBUG nova.network.neutron [-] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2058.285116] env[62508]: ERROR nova.scheduler.client.report [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [req-bafb26e6-dc43-4c46-bb83-836875704dee] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 5d5b4923-a8ac-4688-9f86-2405bd3406a9. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bafb26e6-dc43-4c46-bb83-836875704dee"}]}: nova.exception.BuildAbortException: Build of instance 001e2821-2e78-4b15-8bb4-0dddff544913 aborted: Failed to rebuild volume backed instance. [ 2058.300438] env[62508]: DEBUG nova.scheduler.client.report [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Refreshing inventories for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2058.313649] env[62508]: DEBUG nova.scheduler.client.report [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Updating ProviderTree inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2058.313860] env[62508]: DEBUG nova.compute.provider_tree [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2058.323829] env[62508]: DEBUG nova.scheduler.client.report [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Refreshing aggregate associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, aggregates: None {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2058.340364] env[62508]: DEBUG nova.scheduler.client.report [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Refreshing trait associations for resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62508) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2058.376504] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b56235-5712-4f8b-a571-1e6fc562bd60 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.386727] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40677a3d-3310-458e-a5c7-779e2b2d46b7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.425089] env[62508]: DEBUG nova.compute.manager [req-7c84567c-91d3-4148-8814-0db58ee2ed85 req-f98aaaf1-6427-4207-ae76-5c199ecb37e8 service nova] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Received event network-vif-deleted-eaf16365-bc40-43a4-9370-a03f8babd813 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2058.425296] env[62508]: INFO nova.compute.manager [req-7c84567c-91d3-4148-8814-0db58ee2ed85 req-f98aaaf1-6427-4207-ae76-5c199ecb37e8 service nova] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Neutron deleted interface eaf16365-bc40-43a4-9370-a03f8babd813; detaching it from the instance and deleting it from the info cache [ 2058.425462] env[62508]: DEBUG nova.network.neutron [req-7c84567c-91d3-4148-8814-0db58ee2ed85 req-f98aaaf1-6427-4207-ae76-5c199ecb37e8 service nova] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2058.427533] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f8295b-6ba9-4a68-974b-3227a706e958 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.436970] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec20775-e015-45e5-88c0-b7019a8c7111 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.451429] env[62508]: DEBUG nova.compute.provider_tree [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2058.882492] env[62508]: DEBUG nova.network.neutron [-] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2058.928763] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f295c77c-b808-49c3-ad14-ad8abe8c833e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.941887] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad85304-3637-4943-9c7e-1f45f89121c9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.972300] env[62508]: DEBUG nova.compute.manager [req-7c84567c-91d3-4148-8814-0db58ee2ed85 req-f98aaaf1-6427-4207-ae76-5c199ecb37e8 service nova] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Detach interface failed, port_id=eaf16365-bc40-43a4-9370-a03f8babd813, reason: Instance 001e2821-2e78-4b15-8bb4-0dddff544913 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2058.987893] env[62508]: DEBUG nova.scheduler.client.report [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Updated inventory for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with generation 201 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2058.988178] env[62508]: DEBUG nova.compute.provider_tree [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Updating resource provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 generation from 201 to 202 during operation: update_inventory {{(pid=62508) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2058.988362] env[62508]: DEBUG nova.compute.provider_tree [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Updating inventory in ProviderTree for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2059.385749] env[62508]: INFO 
nova.compute.manager [-] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Took 1.43 seconds to deallocate network for instance. [ 2059.495224] env[62508]: DEBUG oslo_concurrency.lockutils [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.824s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2059.495510] env[62508]: INFO nova.compute.manager [None req-aeeb9d26-82a4-44d1-b4f9-01281bb32740 tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Successfully reverted task state from rebuilding on failure for instance. [ 2059.937055] env[62508]: INFO nova.compute.manager [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Took 0.55 seconds to detach 1 volumes for instance. [ 2059.940292] env[62508]: DEBUG nova.compute.manager [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] [instance: 001e2821-2e78-4b15-8bb4-0dddff544913] Deleting volume: 614b8ae6-4410-4156-9cd5-c2eadcc0c105 {{(pid=62508) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 2060.486983] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2060.487324] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2060.487550] env[62508]: DEBUG nova.objects.instance [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Lazy-loading 'resources' on Instance uuid 001e2821-2e78-4b15-8bb4-0dddff544913 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2061.030532] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feea6385-1ff0-4b35-93e0-7d4214482964 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.038140] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c7d1c3-1654-4e9a-bd58-e2b3cd10b828 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.068534] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1dcdf6-1bd7-4e76-ad7e-95bd28d5ed06 {{(pid=62508) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.075709] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c89a027-e7a0-4b93-be7c-ddca56b71ade {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.088661] env[62508]: DEBUG nova.compute.provider_tree [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2061.592290] env[62508]: DEBUG nova.scheduler.client.report [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2061.993929] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2061.994223] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2061.994223] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Rebuilding the list of instances to heal {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2062.098690] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.611s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2062.524272] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "refresh_cache-db292f08-6a5d-401e-bdba-a368cde4cd39" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2062.524429] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquired lock "refresh_cache-db292f08-6a5d-401e-bdba-a368cde4cd39" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2062.524576] env[62508]: DEBUG nova.network.neutron [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Forcefully refreshing network info cache for instance {{(pid=62508) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2061}} [ 2062.524728] env[62508]: DEBUG nova.objects.instance [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lazy-loading 'info_cache' on Instance uuid db292f08-6a5d-401e-bdba-a368cde4cd39 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2062.615791] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ec723a7b-ea92-4de0-9115-3a08caecb16f tempest-ServerActionsV293TestJSON-777634103 tempest-ServerActionsV293TestJSON-777634103-project-member] Lock "001e2821-2e78-4b15-8bb4-0dddff544913" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.734s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.240997] env[62508]: DEBUG nova.network.neutron [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Updating instance_info_cache with network_info: [{"id": "9136b65f-0214-4b9c-8c13-28ee6289e941", "address": "fa:16:3e:8d:d7:cd", "network": {"id": "f51db9ea-4de8-40f0-9ace-aac05e474fd2", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-866700916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fda0b7fb924f1d97862bf4124f9c20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9136b65f-02", "ovs_interfaceid": "9136b65f-0214-4b9c-8c13-28ee6289e941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2064.744190] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Releasing lock "refresh_cache-db292f08-6a5d-401e-bdba-a368cde4cd39" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2064.744423] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Updated the network info_cache for instance {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2064.744653] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2064.744818] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2064.744950] env[62508]: 
DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2065.993501] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2066.497380] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2066.497736] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2066.498044] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2066.498270] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2066.499255] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f04ef7-04ac-4f35-adf9-31102ce1cf55 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.507909] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d739240-884a-44d4-9d8b-b6b8f9a35cbc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.522487] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff51a6df-8f61-4d82-a9b9-beb6debb4618 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.529233] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2721caad-3e1b-4503-92af-aa75a90a48dc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.559775] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180568MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2066.559974] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2066.560161] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2067.585371] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance db292f08-6a5d-401e-bdba-a368cde4cd39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2067.585733] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 38cc40f2-e322-414b-a266-f7b5dcbedaf8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2067.585733] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2067.585878] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2067.655210] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb3a69f-d3e2-47dd-aa99-e2f9ea951902 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.662667] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c4ac3cc-f83b-498d-a08c-8bd875cd82b4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.691450] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e31879a-5e2c-4842-ba13-971bd079020f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.698883] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5833da0-10cf-4e6f-85c5-1727c2f42533 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.712769] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2068.215974] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for 
provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2068.721571] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2068.722031] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.162s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2072.722227] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2072.722678] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2072.722678] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2072.989173] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2075.746269] env[62508]: DEBUG oslo_concurrency.lockutils [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "38cc40f2-e322-414b-a266-f7b5dcbedaf8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.746656] env[62508]: DEBUG oslo_concurrency.lockutils [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "38cc40f2-e322-414b-a266-f7b5dcbedaf8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.746707] env[62508]: DEBUG oslo_concurrency.lockutils [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock 
"38cc40f2-e322-414b-a266-f7b5dcbedaf8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.746880] env[62508]: DEBUG oslo_concurrency.lockutils [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "38cc40f2-e322-414b-a266-f7b5dcbedaf8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.747095] env[62508]: DEBUG oslo_concurrency.lockutils [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "38cc40f2-e322-414b-a266-f7b5dcbedaf8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2075.750524] env[62508]: INFO nova.compute.manager [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Terminating instance [ 2075.752194] env[62508]: DEBUG nova.compute.manager [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2075.752392] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2075.753233] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833c4316-d1c2-4a0c-84dc-cade5cd85033 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.761246] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2075.761471] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4fbb5cc7-ea80-452a-a706-8b92d9a0b800 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.767874] env[62508]: DEBUG oslo_vmware.api [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2075.767874] env[62508]: value = "task-1777016" [ 2075.767874] env[62508]: _type = "Task" [ 2075.767874] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2075.775729] env[62508]: DEBUG oslo_vmware.api [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777016, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.277468] env[62508]: DEBUG oslo_vmware.api [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777016, 'name': PowerOffVM_Task, 'duration_secs': 0.172544} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.277719] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2076.277894] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2076.278146] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-edebf90c-2cf5-49b7-aa10-bd6082012273 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.355063] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2076.355295] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2076.355486] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Deleting the datastore file [datastore1] 38cc40f2-e322-414b-a266-f7b5dcbedaf8 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2076.355752] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c6f32ea2-a81c-45cf-a2bd-6ecfab21511d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.361858] env[62508]: DEBUG oslo_vmware.api [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2076.361858] env[62508]: value = "task-1777018" [ 2076.361858] 
env[62508]: _type = "Task" [ 2076.361858] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.369179] env[62508]: DEBUG oslo_vmware.api [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777018, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.872161] env[62508]: DEBUG oslo_vmware.api [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777018, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149665} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.872547] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2076.872547] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2076.872706] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2076.872885] env[62508]: INFO nova.compute.manager [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2076.873141] env[62508]: DEBUG oslo.service.loopingcall [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2076.873339] env[62508]: DEBUG nova.compute.manager [-] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2076.873439] env[62508]: DEBUG nova.network.neutron [-] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2076.993473] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2077.282218] env[62508]: DEBUG nova.compute.manager [req-e7a89167-de55-48f8-8a3c-6be56b599369 req-219f3a5b-81ae-4ba3-822e-eb90efd8e12e service nova] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Received event network-vif-deleted-882a20ab-228f-41be-ad63-db5d912770ee {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2077.282432] env[62508]: INFO nova.compute.manager [req-e7a89167-de55-48f8-8a3c-6be56b599369 req-219f3a5b-81ae-4ba3-822e-eb90efd8e12e service nova] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Neutron deleted interface 882a20ab-228f-41be-ad63-db5d912770ee; detaching it from the instance and deleting it from the info cache [ 2077.282621] env[62508]: DEBUG nova.network.neutron [req-e7a89167-de55-48f8-8a3c-6be56b599369 req-219f3a5b-81ae-4ba3-822e-eb90efd8e12e service nova] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2077.761230] env[62508]: DEBUG nova.network.neutron [-] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2077.784978] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8804db10-0700-4af5-a331-6a015a13082d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.794977] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc1e3a8e-cab4-412e-b92f-cb9285b41351 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.821641] env[62508]: DEBUG nova.compute.manager [req-e7a89167-de55-48f8-8a3c-6be56b599369 req-219f3a5b-81ae-4ba3-822e-eb90efd8e12e service nova] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Detach interface failed, port_id=882a20ab-228f-41be-ad63-db5d912770ee, reason: Instance 38cc40f2-e322-414b-a266-f7b5dcbedaf8 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2078.264599] env[62508]: INFO nova.compute.manager [-] [instance: 38cc40f2-e322-414b-a266-f7b5dcbedaf8] Took 1.39 seconds to deallocate network for instance. 
[ 2078.772585] env[62508]: DEBUG oslo_concurrency.lockutils [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.772585] env[62508]: DEBUG oslo_concurrency.lockutils [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2078.772585] env[62508]: DEBUG nova.objects.instance [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lazy-loading 'resources' on Instance uuid 38cc40f2-e322-414b-a266-f7b5dcbedaf8 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2078.988514] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2079.315746] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d16b890-4f46-4807-bb40-058042fe8245 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.323411] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c797a03-28f7-4efe-a851-83e1a797cd7d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.353050] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3fd2bb5-1716-4d9c-a6a0-663950d9587f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.360402] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dfcf2f8-9737-4287-9053-3d1408dff98f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.374962] env[62508]: DEBUG nova.compute.provider_tree [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2079.878490] env[62508]: DEBUG nova.scheduler.client.report [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2080.384039] env[62508]: DEBUG oslo_concurrency.lockutils [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.612s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.401458] env[62508]: INFO nova.scheduler.client.report [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Deleted allocations for instance 38cc40f2-e322-414b-a266-f7b5dcbedaf8 [ 2080.910014] env[62508]: DEBUG oslo_concurrency.lockutils [None req-14213efb-3571-4865-80bb-2030c4cbbdb7 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "38cc40f2-e322-414b-a266-f7b5dcbedaf8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.163s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2081.610840] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c7a39508-125b-412b-85ee-8ba2dcbee5ae tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "db292f08-6a5d-401e-bdba-a368cde4cd39" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2081.611263] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c7a39508-125b-412b-85ee-8ba2dcbee5ae tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "db292f08-6a5d-401e-bdba-a368cde4cd39" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2082.114475] env[62508]: INFO nova.compute.manager [None req-c7a39508-125b-412b-85ee-8ba2dcbee5ae tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Detaching volume 3a834111-65b6-4937-bbae-d9af1ba7629e [ 2082.143505] env[62508]: INFO nova.virt.block_device [None req-c7a39508-125b-412b-85ee-8ba2dcbee5ae tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Attempting to driver detach volume 3a834111-65b6-4937-bbae-d9af1ba7629e from mountpoint /dev/sdb [ 2082.143742] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7a39508-125b-412b-85ee-8ba2dcbee5ae tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Volume detach. 
Driver type: vmdk {{(pid=62508) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2082.143933] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7a39508-125b-412b-85ee-8ba2dcbee5ae tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368865', 'volume_id': '3a834111-65b6-4937-bbae-d9af1ba7629e', 'name': 'volume-3a834111-65b6-4937-bbae-d9af1ba7629e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'db292f08-6a5d-401e-bdba-a368cde4cd39', 'attached_at': '', 'detached_at': '', 'volume_id': '3a834111-65b6-4937-bbae-d9af1ba7629e', 'serial': '3a834111-65b6-4937-bbae-d9af1ba7629e'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2082.144818] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dec80c5-2e96-417a-9033-58a48c2ba9e0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.166616] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1b6298-b7c6-430f-b7c0-aff9f34ba363 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.173291] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2153179-6100-48f6-a641-6dc6a515cf65 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.194405] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70801da3-46b4-4d5a-aad8-11f785030185 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.208560] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7a39508-125b-412b-85ee-8ba2dcbee5ae tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] The volume has not been displaced from its original location: [datastore1] volume-3a834111-65b6-4937-bbae-d9af1ba7629e/volume-3a834111-65b6-4937-bbae-d9af1ba7629e.vmdk. No consolidation needed. 
{{(pid=62508) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2082.213758] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7a39508-125b-412b-85ee-8ba2dcbee5ae tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Reconfiguring VM instance instance-00000076 to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2082.213998] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e1b390e-af43-4a3b-9af3-ae9bcb024ab0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.232633] env[62508]: DEBUG oslo_vmware.api [None req-c7a39508-125b-412b-85ee-8ba2dcbee5ae tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2082.232633] env[62508]: value = "task-1777019" [ 2082.232633] env[62508]: _type = "Task" [ 2082.232633] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.242650] env[62508]: DEBUG oslo_vmware.api [None req-c7a39508-125b-412b-85ee-8ba2dcbee5ae tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777019, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.742937] env[62508]: DEBUG oslo_vmware.api [None req-c7a39508-125b-412b-85ee-8ba2dcbee5ae tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777019, 'name': ReconfigVM_Task, 'duration_secs': 0.246912} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.743290] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7a39508-125b-412b-85ee-8ba2dcbee5ae tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Reconfigured VM instance instance-00000076 to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2082.747903] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-115ceb13-8eb2-4cc8-899b-45684eaf75c2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.762232] env[62508]: DEBUG oslo_vmware.api [None req-c7a39508-125b-412b-85ee-8ba2dcbee5ae tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2082.762232] env[62508]: value = "task-1777020" [ 2082.762232] env[62508]: _type = "Task" [ 2082.762232] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.769683] env[62508]: DEBUG oslo_vmware.api [None req-c7a39508-125b-412b-85ee-8ba2dcbee5ae tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777020, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.271547] env[62508]: DEBUG oslo_vmware.api [None req-c7a39508-125b-412b-85ee-8ba2dcbee5ae tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777020, 'name': ReconfigVM_Task, 'duration_secs': 0.132559} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.271853] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7a39508-125b-412b-85ee-8ba2dcbee5ae tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368865', 'volume_id': '3a834111-65b6-4937-bbae-d9af1ba7629e', 'name': 'volume-3a834111-65b6-4937-bbae-d9af1ba7629e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'db292f08-6a5d-401e-bdba-a368cde4cd39', 'attached_at': '', 'detached_at': '', 'volume_id': '3a834111-65b6-4937-bbae-d9af1ba7629e', 'serial': '3a834111-65b6-4937-bbae-d9af1ba7629e'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2083.815656] env[62508]: DEBUG nova.objects.instance [None req-c7a39508-125b-412b-85ee-8ba2dcbee5ae tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lazy-loading 'flavor' on Instance uuid db292f08-6a5d-401e-bdba-a368cde4cd39 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2084.822736] env[62508]: DEBUG oslo_concurrency.lockutils [None req-c7a39508-125b-412b-85ee-8ba2dcbee5ae tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "db292f08-6a5d-401e-bdba-a368cde4cd39" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.212s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2085.845912] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "db292f08-6a5d-401e-bdba-a368cde4cd39" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2085.846370] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "db292f08-6a5d-401e-bdba-a368cde4cd39" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2085.846498] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "db292f08-6a5d-401e-bdba-a368cde4cd39-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2085.846643] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "db292f08-6a5d-401e-bdba-a368cde4cd39-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2085.846823] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "db292f08-6a5d-401e-bdba-a368cde4cd39-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2085.848977] env[62508]: INFO nova.compute.manager [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Terminating instance [ 2085.850725] env[62508]: DEBUG nova.compute.manager [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2085.850929] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2085.851772] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b7d7a3-f933-40b8-b0d0-445e059722a3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.859601] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2085.860092] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ee28e30e-9d5c-4708-8af1-174900777da7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.866180] env[62508]: DEBUG oslo_vmware.api [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2085.866180] env[62508]: value = "task-1777021" [ 2085.866180] env[62508]: _type = "Task" [ 2085.866180] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.873948] env[62508]: DEBUG oslo_vmware.api [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777021, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.376091] env[62508]: DEBUG oslo_vmware.api [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777021, 'name': PowerOffVM_Task, 'duration_secs': 0.192827} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2086.376364] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2086.376538] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2086.376782] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-31928888-ee9a-4903-9e77-d7412d6c3d48 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.604658] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2086.604894] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2086.605100] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Deleting the datastore file [datastore1] db292f08-6a5d-401e-bdba-a368cde4cd39 {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2086.605381] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d572fc85-c944-40d2-8141-ffe856751331 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.614215] env[62508]: DEBUG oslo_vmware.api [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2086.614215] env[62508]: value = "task-1777023" [ 2086.614215] 
env[62508]: _type = "Task" [ 2086.614215] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.621729] env[62508]: DEBUG oslo_vmware.api [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777023, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.124302] env[62508]: DEBUG oslo_vmware.api [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777023, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126826} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2087.124777] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2087.124777] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2087.124917] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2087.125094] env[62508]: INFO nova.compute.manager [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Took 1.27 seconds to destroy the instance on the hypervisor. [ 2087.125349] env[62508]: DEBUG oslo.service.loopingcall [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2087.125548] env[62508]: DEBUG nova.compute.manager [-] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2087.125638] env[62508]: DEBUG nova.network.neutron [-] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2087.751756] env[62508]: DEBUG nova.compute.manager [req-383f521b-355d-461b-a5c4-be0c34c4188e req-eac0eba7-9b77-48b6-9d4c-73459d60b143 service nova] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Received event network-vif-deleted-9136b65f-0214-4b9c-8c13-28ee6289e941 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2087.752083] env[62508]: INFO nova.compute.manager [req-383f521b-355d-461b-a5c4-be0c34c4188e req-eac0eba7-9b77-48b6-9d4c-73459d60b143 service nova] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Neutron deleted interface 9136b65f-0214-4b9c-8c13-28ee6289e941; detaching it from the instance and deleting it from the info cache [ 2087.752218] env[62508]: DEBUG nova.network.neutron [req-383f521b-355d-461b-a5c4-be0c34c4188e req-eac0eba7-9b77-48b6-9d4c-73459d60b143 service nova] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2088.231056] env[62508]: DEBUG nova.network.neutron [-] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2088.254586] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9ee70074-a35c-43b0-95e0-0d525061765b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.264306] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf70b81-7a67-489e-8a82-a162c9d11624 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.288842] env[62508]: DEBUG nova.compute.manager [req-383f521b-355d-461b-a5c4-be0c34c4188e req-eac0eba7-9b77-48b6-9d4c-73459d60b143 service nova] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Detach interface failed, port_id=9136b65f-0214-4b9c-8c13-28ee6289e941, reason: Instance db292f08-6a5d-401e-bdba-a368cde4cd39 could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2088.734592] env[62508]: INFO nova.compute.manager [-] [instance: db292f08-6a5d-401e-bdba-a368cde4cd39] Took 1.61 seconds to deallocate network for instance. 
[ 2089.241058] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2089.241468] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2089.241628] env[62508]: DEBUG nova.objects.instance [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lazy-loading 'resources' on Instance uuid db292f08-6a5d-401e-bdba-a368cde4cd39 {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2089.776600] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751d008e-4318-4c66-910c-eee1396b2914 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.784087] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ef09f4-a740-4574-bf01-902c668152f8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.814583] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4eb2d81-54ce-4d7b-8ee8-80e8674a0177 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.821867] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ce4dfc-be4c-4677-9518-d70bea2241b9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.835317] env[62508]: DEBUG nova.compute.provider_tree [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2090.338930] env[62508]: DEBUG nova.scheduler.client.report [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2090.843656] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 
tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.602s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2090.863958] env[62508]: INFO nova.scheduler.client.report [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Deleted allocations for instance db292f08-6a5d-401e-bdba-a368cde4cd39 [ 2091.371845] env[62508]: DEBUG oslo_concurrency.lockutils [None req-d7beb283-16f5-4c28-99fc-797a8c1eedc0 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "db292f08-6a5d-401e-bdba-a368cde4cd39" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.526s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2093.784824] env[62508]: DEBUG oslo_concurrency.lockutils [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2093.785180] env[62508]: DEBUG oslo_concurrency.lockutils [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2094.287907] env[62508]: DEBUG nova.compute.manager [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2094.807321] env[62508]: DEBUG oslo_concurrency.lockutils [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2094.807576] env[62508]: DEBUG oslo_concurrency.lockutils [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2094.809157] env[62508]: INFO nova.compute.claims [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2095.843190] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f421538e-2952-4df1-a410-1d48cb0778bd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.850971] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9e2cad-245f-4005-a026-eac0da837d72 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.880037] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e903100f-976b-4846-9765-ad9e9558cc03 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.886976] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-affb20a4-4553-4fc8-97f3-c3ada87d82b4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.899709] env[62508]: DEBUG nova.compute.provider_tree [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2096.403078] env[62508]: DEBUG nova.scheduler.client.report [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2096.908339] env[62508]: DEBUG oslo_concurrency.lockutils 
[None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.100s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2096.908886] env[62508]: DEBUG nova.compute.manager [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2097.413620] env[62508]: DEBUG nova.compute.utils [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2097.416440] env[62508]: DEBUG nova.compute.manager [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2097.416625] env[62508]: DEBUG nova.network.neutron [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2097.464308] env[62508]: DEBUG nova.policy [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '66400df997044a7ca8b711be48707221', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '22fda0b7fb924f1d97862bf4124f9c20', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 2097.723121] env[62508]: DEBUG nova.network.neutron [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Successfully created port: b3cf3d80-1055-4a42-b10b-0e30c8b42000 {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2097.920103] env[62508]: DEBUG nova.compute.manager [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2098.930320] env[62508]: DEBUG nova.compute.manager [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2098.957698] env[62508]: DEBUG nova.virt.hardware [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2098.957950] env[62508]: DEBUG nova.virt.hardware [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2098.958128] env[62508]: DEBUG nova.virt.hardware [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2098.958360] env[62508]: DEBUG nova.virt.hardware [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2098.958516] env[62508]: DEBUG nova.virt.hardware [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2098.958664] env[62508]: DEBUG nova.virt.hardware [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2098.958870] env[62508]: DEBUG nova.virt.hardware [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 2098.959047] env[62508]: DEBUG nova.virt.hardware [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2098.959219] env[62508]: DEBUG nova.virt.hardware [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2098.959384] env[62508]: DEBUG nova.virt.hardware [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2098.959647] env[62508]: DEBUG nova.virt.hardware [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2098.960548] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20efdef6-9db9-4865-9a73-dc82d441fafd {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.968614] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ade3e0-c04e-4520-9404-1d55267487e7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.091604] env[62508]: DEBUG nova.compute.manager [req-d738d65f-590e-4e0e-a1d1-2b28470a87a9 req-5444ba1e-fc19-4918-9026-c949ceac3a84 service nova] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Received event network-vif-plugged-b3cf3d80-1055-4a42-b10b-0e30c8b42000 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2099.091893] env[62508]: DEBUG oslo_concurrency.lockutils [req-d738d65f-590e-4e0e-a1d1-2b28470a87a9 req-5444ba1e-fc19-4918-9026-c949ceac3a84 service nova] Acquiring lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2099.092083] env[62508]: DEBUG oslo_concurrency.lockutils [req-d738d65f-590e-4e0e-a1d1-2b28470a87a9 req-5444ba1e-fc19-4918-9026-c949ceac3a84 service nova] Lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2099.092266] env[62508]: DEBUG oslo_concurrency.lockutils [req-d738d65f-590e-4e0e-a1d1-2b28470a87a9 req-5444ba1e-fc19-4918-9026-c949ceac3a84 service nova] Lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2099.092439] env[62508]: 
DEBUG nova.compute.manager [req-d738d65f-590e-4e0e-a1d1-2b28470a87a9 req-5444ba1e-fc19-4918-9026-c949ceac3a84 service nova] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] No waiting events found dispatching network-vif-plugged-b3cf3d80-1055-4a42-b10b-0e30c8b42000 {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2099.092605] env[62508]: WARNING nova.compute.manager [req-d738d65f-590e-4e0e-a1d1-2b28470a87a9 req-5444ba1e-fc19-4918-9026-c949ceac3a84 service nova] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Received unexpected event network-vif-plugged-b3cf3d80-1055-4a42-b10b-0e30c8b42000 for instance with vm_state building and task_state spawning. [ 2099.170030] env[62508]: DEBUG nova.network.neutron [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Successfully updated port: b3cf3d80-1055-4a42-b10b-0e30c8b42000 {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2099.674511] env[62508]: DEBUG oslo_concurrency.lockutils [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "refresh_cache-e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2099.674629] env[62508]: DEBUG oslo_concurrency.lockutils [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquired lock "refresh_cache-e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2099.674787] env[62508]: DEBUG nova.network.neutron [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2100.206405] env[62508]: DEBUG nova.network.neutron [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2100.329087] env[62508]: DEBUG nova.network.neutron [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Updating instance_info_cache with network_info: [{"id": "b3cf3d80-1055-4a42-b10b-0e30c8b42000", "address": "fa:16:3e:6e:d9:8d", "network": {"id": "f51db9ea-4de8-40f0-9ace-aac05e474fd2", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-866700916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fda0b7fb924f1d97862bf4124f9c20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3cf3d80-10", "ovs_interfaceid": "b3cf3d80-1055-4a42-b10b-0e30c8b42000", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2100.831909] env[62508]: DEBUG oslo_concurrency.lockutils [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Releasing lock "refresh_cache-e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2100.832272] env[62508]: DEBUG nova.compute.manager [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Instance network_info: |[{"id": "b3cf3d80-1055-4a42-b10b-0e30c8b42000", "address": "fa:16:3e:6e:d9:8d", "network": {"id": "f51db9ea-4de8-40f0-9ace-aac05e474fd2", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-866700916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fda0b7fb924f1d97862bf4124f9c20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3cf3d80-10", "ovs_interfaceid": "b3cf3d80-1055-4a42-b10b-0e30c8b42000", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 2100.832704] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:d9:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39a4aca0-934b-4a91-8779-6a4360c3f967', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3cf3d80-1055-4a42-b10b-0e30c8b42000', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2100.840627] env[62508]: DEBUG oslo.service.loopingcall [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2100.840838] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2100.841082] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a73f20df-bcf6-48e3-a211-b2654779c474 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.861988] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2100.861988] env[62508]: value = "task-1777024" [ 2100.861988] env[62508]: _type = "Task" [ 2100.861988] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2100.869274] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777024, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.117252] env[62508]: DEBUG nova.compute.manager [req-2c2e6c7a-053f-4276-87ad-c0503aaf7355 req-100c513d-e330-49ec-8c0e-53384abfeaec service nova] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Received event network-changed-b3cf3d80-1055-4a42-b10b-0e30c8b42000 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2101.117382] env[62508]: DEBUG nova.compute.manager [req-2c2e6c7a-053f-4276-87ad-c0503aaf7355 req-100c513d-e330-49ec-8c0e-53384abfeaec service nova] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Refreshing instance network info cache due to event network-changed-b3cf3d80-1055-4a42-b10b-0e30c8b42000. 
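Between the cached Neutron port entry and the "Instance VIF info" line just above, the driver condenses each port into the small dict it hands to build_virtual_machine. A hypothetical helper showing that mapping, using only fields visible in the log; the function itself and the fixed vmxnet3 model are illustrative assumptions, not Nova code:

def to_vif_info(port):
    details = port["details"]
    return {
        "network_name": port["network"]["bridge"],         # 'br-int'
        "mac_address": port["address"],                     # 'fa:16:3e:6e:d9:8d'
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": port["id"],                             # Neutron port UUID
        "vif_model": "vmxnet3",                             # assumed constant here
    }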
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2101.117600] env[62508]: DEBUG oslo_concurrency.lockutils [req-2c2e6c7a-053f-4276-87ad-c0503aaf7355 req-100c513d-e330-49ec-8c0e-53384abfeaec service nova] Acquiring lock "refresh_cache-e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2101.117748] env[62508]: DEBUG oslo_concurrency.lockutils [req-2c2e6c7a-053f-4276-87ad-c0503aaf7355 req-100c513d-e330-49ec-8c0e-53384abfeaec service nova] Acquired lock "refresh_cache-e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2101.117912] env[62508]: DEBUG nova.network.neutron [req-2c2e6c7a-053f-4276-87ad-c0503aaf7355 req-100c513d-e330-49ec-8c0e-53384abfeaec service nova] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Refreshing network info cache for port b3cf3d80-1055-4a42-b10b-0e30c8b42000 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2101.372126] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777024, 'name': CreateVM_Task, 'duration_secs': 0.473179} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2101.372126] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2101.372806] env[62508]: DEBUG oslo_concurrency.lockutils [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2101.372806] env[62508]: DEBUG oslo_concurrency.lockutils [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2101.372806] env[62508]: DEBUG oslo_concurrency.lockutils [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2101.373041] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb896601-9944-4af5-a19c-e2f55dd9d881 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.377675] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2101.377675] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e673bd-8b38-bf88-2d30-be1ac3cff260" [ 2101.377675] env[62508]: _type = "Task" [ 2101.377675] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2101.384966] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e673bd-8b38-bf88-2d30-be1ac3cff260, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.811794] env[62508]: DEBUG nova.network.neutron [req-2c2e6c7a-053f-4276-87ad-c0503aaf7355 req-100c513d-e330-49ec-8c0e-53384abfeaec service nova] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Updated VIF entry in instance network info cache for port b3cf3d80-1055-4a42-b10b-0e30c8b42000. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2101.812168] env[62508]: DEBUG nova.network.neutron [req-2c2e6c7a-053f-4276-87ad-c0503aaf7355 req-100c513d-e330-49ec-8c0e-53384abfeaec service nova] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Updating instance_info_cache with network_info: [{"id": "b3cf3d80-1055-4a42-b10b-0e30c8b42000", "address": "fa:16:3e:6e:d9:8d", "network": {"id": "f51db9ea-4de8-40f0-9ace-aac05e474fd2", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-866700916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fda0b7fb924f1d97862bf4124f9c20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3cf3d80-10", "ovs_interfaceid": "b3cf3d80-1055-4a42-b10b-0e30c8b42000", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2101.888248] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52e673bd-8b38-bf88-2d30-be1ac3cff260, 'name': SearchDatastore_Task, 'duration_secs': 0.013694} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2101.888544] env[62508]: DEBUG oslo_concurrency.lockutils [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2101.888771] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2101.888996] env[62508]: DEBUG oslo_concurrency.lockutils [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2101.889200] env[62508]: DEBUG oslo_concurrency.lockutils [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2101.889386] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2101.889628] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ce0d683-0ca3-436f-b70e-d3600a4e52b8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.898282] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2101.898414] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2101.899082] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6182a189-3a2b-434c-9bc6-a628165e8297 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.903486] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2101.903486] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5291599f-1028-459c-2d2e-e59f77604438" [ 2101.903486] env[62508]: _type = "Task" [ 2101.903486] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2101.910377] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5291599f-1028-459c-2d2e-e59f77604438, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2102.315197] env[62508]: DEBUG oslo_concurrency.lockutils [req-2c2e6c7a-053f-4276-87ad-c0503aaf7355 req-100c513d-e330-49ec-8c0e-53384abfeaec service nova] Releasing lock "refresh_cache-e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2102.413504] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5291599f-1028-459c-2d2e-e59f77604438, 'name': SearchDatastore_Task, 'duration_secs': 0.008456} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2102.414294] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05f00f95-48d3-426f-99ce-de17f9f6d0bf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.419381] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2102.419381] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52aeb9d7-9fdc-fdec-0a6c-61b60cbe6c28" [ 2102.419381] env[62508]: _type = "Task" [ 2102.419381] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2102.426604] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52aeb9d7-9fdc-fdec-0a6c-61b60cbe6c28, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2102.929452] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]52aeb9d7-9fdc-fdec-0a6c-61b60cbe6c28, 'name': SearchDatastore_Task, 'duration_secs': 0.009517} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2102.929704] env[62508]: DEBUG oslo_concurrency.lockutils [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2102.929961] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed/e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2102.930230] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fb3c8c08-a9aa-4cd1-8e5e-5bf1556620ae {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.936476] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2102.936476] env[62508]: value = "task-1777025" [ 2102.936476] env[62508]: _type = "Task" [ 2102.936476] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2102.943767] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777025, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2103.446023] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777025, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461248} completed successfully. 
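The CreateVM_Task, SearchDatastore_Task and CopyVirtualDisk_Task cycles above all follow the same shape: invoke the vCenter method, then poll the returned task until it reports success, logging progress along the way and the final duration_secs. A simplified stand-in for that loop, not oslo.vmware's implementation; get_task_info() and the poll interval are assumptions:

import time

def wait_for_task(session, task_ref, poll_interval=0.5):
    start = time.monotonic()
    while True:
        info = session.get_task_info(task_ref)   # hypothetical accessor
        if info.state == "success":
            return time.monotonic() - start      # the duration_secs reported above
        if info.state == "error":
            raise RuntimeError(info.error)
        time.sleep(poll_interval)                # still queued/running: poll again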
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2103.446394] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed/e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2103.446544] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2103.446758] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3aaafd21-2d6c-4b7c-b27e-90d34743a999 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.454241] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2103.454241] env[62508]: value = "task-1777026" [ 2103.454241] env[62508]: _type = "Task" [ 2103.454241] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2103.462199] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777026, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2103.964427] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777026, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058896} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2103.964696] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2103.965470] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c49fffd-89c6-40de-8ae8-fc577e6d65cc {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.986717] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed/e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2103.987016] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50bd8cb5-6d51-4430-84c2-f48c8547daba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.007274] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2104.007274] env[62508]: value = "task-1777027" [ 2104.007274] env[62508]: _type = "Task" [ 2104.007274] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.015151] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777027, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.517897] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777027, 'name': ReconfigVM_Task, 'duration_secs': 0.299731} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2104.518326] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Reconfigured VM instance instance-0000007b to attach disk [datastore1] e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed/e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2104.518817] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1b6e8d90-29e9-4ef6-883b-65bbf7a70f8d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.524463] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2104.524463] env[62508]: value = "task-1777028" [ 2104.524463] env[62508]: _type = "Task" [ 2104.524463] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.532047] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777028, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.034608] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777028, 'name': Rename_Task, 'duration_secs': 0.237015} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.034883] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2105.035141] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0af05f70-9a0a-4646-aba2-8a6dd2d9d31c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.041243] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2105.041243] env[62508]: value = "task-1777029" [ 2105.041243] env[62508]: _type = "Task" [ 2105.041243] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2105.049126] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777029, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.551358] env[62508]: DEBUG oslo_vmware.api [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777029, 'name': PowerOnVM_Task, 'duration_secs': 0.430513} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.551749] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2105.551872] env[62508]: INFO nova.compute.manager [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Took 6.62 seconds to spawn the instance on the hypervisor. [ 2105.551997] env[62508]: DEBUG nova.compute.manager [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2105.552749] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3c00a9-2d36-4e84-9102-6d641491cf25 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.071602] env[62508]: INFO nova.compute.manager [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Took 11.28 seconds to build instance. [ 2106.574388] env[62508]: DEBUG oslo_concurrency.lockutils [None req-04a69670-db0b-4600-ba04-b2e86d366042 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.789s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2106.756683] env[62508]: DEBUG nova.compute.manager [req-d6ee715b-e48e-4d49-aaf4-ca03cf126731 req-b824e762-9f03-4a07-9d2a-407b5dbeb007 service nova] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Received event network-changed-b3cf3d80-1055-4a42-b10b-0e30c8b42000 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2106.756943] env[62508]: DEBUG nova.compute.manager [req-d6ee715b-e48e-4d49-aaf4-ca03cf126731 req-b824e762-9f03-4a07-9d2a-407b5dbeb007 service nova] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Refreshing instance network info cache due to event network-changed-b3cf3d80-1055-4a42-b10b-0e30c8b42000. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2106.757183] env[62508]: DEBUG oslo_concurrency.lockutils [req-d6ee715b-e48e-4d49-aaf4-ca03cf126731 req-b824e762-9f03-4a07-9d2a-407b5dbeb007 service nova] Acquiring lock "refresh_cache-e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2106.757334] env[62508]: DEBUG oslo_concurrency.lockutils [req-d6ee715b-e48e-4d49-aaf4-ca03cf126731 req-b824e762-9f03-4a07-9d2a-407b5dbeb007 service nova] Acquired lock "refresh_cache-e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2106.757503] env[62508]: DEBUG nova.network.neutron [req-d6ee715b-e48e-4d49-aaf4-ca03cf126731 req-b824e762-9f03-4a07-9d2a-407b5dbeb007 service nova] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Refreshing network info cache for port b3cf3d80-1055-4a42-b10b-0e30c8b42000 {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2107.460347] env[62508]: DEBUG nova.network.neutron [req-d6ee715b-e48e-4d49-aaf4-ca03cf126731 req-b824e762-9f03-4a07-9d2a-407b5dbeb007 service nova] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Updated VIF entry in instance network info cache for port b3cf3d80-1055-4a42-b10b-0e30c8b42000. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2107.460710] env[62508]: DEBUG nova.network.neutron [req-d6ee715b-e48e-4d49-aaf4-ca03cf126731 req-b824e762-9f03-4a07-9d2a-407b5dbeb007 service nova] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Updating instance_info_cache with network_info: [{"id": "b3cf3d80-1055-4a42-b10b-0e30c8b42000", "address": "fa:16:3e:6e:d9:8d", "network": {"id": "f51db9ea-4de8-40f0-9ace-aac05e474fd2", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-866700916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fda0b7fb924f1d97862bf4124f9c20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3cf3d80-10", "ovs_interfaceid": "b3cf3d80-1055-4a42-b10b-0e30c8b42000", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2107.963423] env[62508]: DEBUG oslo_concurrency.lockutils [req-d6ee715b-e48e-4d49-aaf4-ca03cf126731 req-b824e762-9f03-4a07-9d2a-407b5dbeb007 service nova] Releasing lock "refresh_cache-e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2122.993691] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic 
task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2122.994185] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2123.999220] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Didn't find any instances for network info cache update. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 2123.999623] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2123.999623] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2124.993831] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2127.994036] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2128.496770] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.496976] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2128.497171] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2128.497331] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2128.498259] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e060429-c38f-44d4-a5c9-845c37dd3936 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.507012] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c51f1777-78d0-4be3-82e4-55694dc1abcf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.521499] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e192f4f0-4e6c-4117-87fe-e0fa95eaf83d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.527895] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ac653f-e2f9-4df8-aff8-c34dbe0f05a2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.555919] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181115MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2128.556082] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.556267] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2129.580340] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
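The per-instance allocation recorded here ({'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}) plus the host reservation from the provider inventory a few lines below (512 MB of RAM reserved) account for the "Final resource view" figures. A back-of-the-envelope check, purely illustrative:

reserved_ram_mb = 512                                    # MEMORY_MB 'reserved' in the inventory
instance_allocs = [{"MEMORY_MB": 192, "VCPU": 1, "DISK_GB": 1}]

used_ram_mb = reserved_ram_mb + sum(a["MEMORY_MB"] for a in instance_allocs)
used_vcpus = sum(a["VCPU"] for a in instance_allocs)
used_disk_gb = sum(a["DISK_GB"] for a in instance_allocs)
print(used_ram_mb, used_vcpus, used_disk_gb)             # 704 1 1, matching used_ram=704MB, used_vcpus=1, used_disk=1GB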
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2129.580625] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2129.580670] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2129.606282] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c44f64e-47ea-4eb0-beb8-403989e1d1ad {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.613494] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fb01c43-bbd6-43d8-be50-71dde9dfe9ec {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.643463] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26252b16-8955-448e-a60b-91da0e6febdb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.650164] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab07563-df17-48a5-ae7e-b4364775f07b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.662720] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2130.165830] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2130.671259] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2130.671640] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.115s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2133.672140] env[62508]: DEBUG oslo_service.periodic_task [None 
req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2133.672463] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2133.672532] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2133.672679] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2137.994368] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2144.271618] env[62508]: DEBUG oslo_concurrency.lockutils [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2144.271932] env[62508]: DEBUG oslo_concurrency.lockutils [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2144.775114] env[62508]: DEBUG nova.compute.utils [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2145.278692] env[62508]: DEBUG oslo_concurrency.lockutils [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2146.338303] env[62508]: DEBUG oslo_concurrency.lockutils [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62508) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2146.338688] env[62508]: DEBUG oslo_concurrency.lockutils [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2146.338813] env[62508]: INFO nova.compute.manager [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Attaching volume a75b944b-18d9-456c-90c8-7e50a33e8791 to /dev/sdb [ 2146.368987] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e34f36-e7f1-440e-a156-9ef169c0a793 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.375933] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6c88ed-028e-42bc-9e9b-ed99994c16c9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.389062] env[62508]: DEBUG nova.virt.block_device [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Updating existing volume attachment record: d19c7414-7b29-42e1-b19c-057f1fed8448 {{(pid=62508) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2150.931357] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Volume attach. 
Driver type: vmdk {{(pid=62508) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2150.931591] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368872', 'volume_id': 'a75b944b-18d9-456c-90c8-7e50a33e8791', 'name': 'volume-a75b944b-18d9-456c-90c8-7e50a33e8791', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed', 'attached_at': '', 'detached_at': '', 'volume_id': 'a75b944b-18d9-456c-90c8-7e50a33e8791', 'serial': 'a75b944b-18d9-456c-90c8-7e50a33e8791'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2150.932480] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6c3ad0-c361-46f6-8f82-2dfa795aa56c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.948895] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a57ab75-7cdc-46f4-a783-f80103abfd91 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.974079] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] volume-a75b944b-18d9-456c-90c8-7e50a33e8791/volume-a75b944b-18d9-456c-90c8-7e50a33e8791.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2150.974369] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d7400dd-a725-46af-872f-8c30c1552de0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.993260] env[62508]: DEBUG oslo_vmware.api [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2150.993260] env[62508]: value = "task-1777032" [ 2150.993260] env[62508]: _type = "Task" [ 2150.993260] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2151.002261] env[62508]: DEBUG oslo_vmware.api [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777032, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2151.503232] env[62508]: DEBUG oslo_vmware.api [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777032, 'name': ReconfigVM_Task, 'duration_secs': 0.395958} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2151.503512] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Reconfigured VM instance instance-0000007b to attach disk [datastore1] volume-a75b944b-18d9-456c-90c8-7e50a33e8791/volume-a75b944b-18d9-456c-90c8-7e50a33e8791.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2151.508380] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cad213c6-c4e6-4a4e-a2c1-823309679de8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.524037] env[62508]: DEBUG oslo_vmware.api [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2151.524037] env[62508]: value = "task-1777033" [ 2151.524037] env[62508]: _type = "Task" [ 2151.524037] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2151.532467] env[62508]: DEBUG oslo_vmware.api [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777033, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2152.033950] env[62508]: DEBUG oslo_vmware.api [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777033, 'name': ReconfigVM_Task, 'duration_secs': 0.160165} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2152.034348] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368872', 'volume_id': 'a75b944b-18d9-456c-90c8-7e50a33e8791', 'name': 'volume-a75b944b-18d9-456c-90c8-7e50a33e8791', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed', 'attached_at': '', 'detached_at': '', 'volume_id': 'a75b944b-18d9-456c-90c8-7e50a33e8791', 'serial': 'a75b944b-18d9-456c-90c8-7e50a33e8791'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2153.070467] env[62508]: DEBUG nova.objects.instance [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lazy-loading 'flavor' on Instance uuid e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2153.576361] env[62508]: DEBUG oslo_concurrency.lockutils [None req-399a62ef-8507-4f64-bb91-b4ade2a31d70 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.238s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2153.779053] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2b25dd57-fe01-431f-a29d-8f987153bec6 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2153.779053] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2b25dd57-fe01-431f-a29d-8f987153bec6 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2154.282229] env[62508]: INFO nova.compute.manager [None req-2b25dd57-fe01-431f-a29d-8f987153bec6 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Detaching volume a75b944b-18d9-456c-90c8-7e50a33e8791 [ 2154.315676] env[62508]: INFO nova.virt.block_device [None req-2b25dd57-fe01-431f-a29d-8f987153bec6 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Attempting to driver detach volume a75b944b-18d9-456c-90c8-7e50a33e8791 from mountpoint /dev/sdb [ 2154.315926] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b25dd57-fe01-431f-a29d-8f987153bec6 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 
e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Volume detach. Driver type: vmdk {{(pid=62508) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2154.316133] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b25dd57-fe01-431f-a29d-8f987153bec6 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368872', 'volume_id': 'a75b944b-18d9-456c-90c8-7e50a33e8791', 'name': 'volume-a75b944b-18d9-456c-90c8-7e50a33e8791', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed', 'attached_at': '', 'detached_at': '', 'volume_id': 'a75b944b-18d9-456c-90c8-7e50a33e8791', 'serial': 'a75b944b-18d9-456c-90c8-7e50a33e8791'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2154.317082] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04ede6f-c28c-4e77-9d10-d2684067552e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.338257] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf41ef9-1e75-4933-b07e-0c3cf87c562d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.344909] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a82ad29-120c-4488-8371-372db90fef46 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.364804] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a866e936-4a54-4337-b0fc-e470b399eb7b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.380102] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b25dd57-fe01-431f-a29d-8f987153bec6 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] The volume has not been displaced from its original location: [datastore1] volume-a75b944b-18d9-456c-90c8-7e50a33e8791/volume-a75b944b-18d9-456c-90c8-7e50a33e8791.vmdk. No consolidation needed. 
{{(pid=62508) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2154.385379] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b25dd57-fe01-431f-a29d-8f987153bec6 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Reconfiguring VM instance instance-0000007b to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2154.385668] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5ed2aa4-4360-4362-abeb-04647874b3a0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.403336] env[62508]: DEBUG oslo_vmware.api [None req-2b25dd57-fe01-431f-a29d-8f987153bec6 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2154.403336] env[62508]: value = "task-1777034" [ 2154.403336] env[62508]: _type = "Task" [ 2154.403336] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2154.411894] env[62508]: DEBUG oslo_vmware.api [None req-2b25dd57-fe01-431f-a29d-8f987153bec6 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777034, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2154.913259] env[62508]: DEBUG oslo_vmware.api [None req-2b25dd57-fe01-431f-a29d-8f987153bec6 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777034, 'name': ReconfigVM_Task, 'duration_secs': 0.205874} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2154.913600] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b25dd57-fe01-431f-a29d-8f987153bec6 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Reconfigured VM instance instance-0000007b to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2154.918283] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbd5cb26-7df1-40a2-b94d-e947fbfc681d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.932706] env[62508]: DEBUG oslo_vmware.api [None req-2b25dd57-fe01-431f-a29d-8f987153bec6 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2154.932706] env[62508]: value = "task-1777035" [ 2154.932706] env[62508]: _type = "Task" [ 2154.932706] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2154.940267] env[62508]: DEBUG oslo_vmware.api [None req-2b25dd57-fe01-431f-a29d-8f987153bec6 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777035, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2155.442407] env[62508]: DEBUG oslo_vmware.api [None req-2b25dd57-fe01-431f-a29d-8f987153bec6 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777035, 'name': ReconfigVM_Task, 'duration_secs': 0.132839} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2155.442787] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b25dd57-fe01-431f-a29d-8f987153bec6 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368872', 'volume_id': 'a75b944b-18d9-456c-90c8-7e50a33e8791', 'name': 'volume-a75b944b-18d9-456c-90c8-7e50a33e8791', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed', 'attached_at': '', 'detached_at': '', 'volume_id': 'a75b944b-18d9-456c-90c8-7e50a33e8791', 'serial': 'a75b944b-18d9-456c-90c8-7e50a33e8791'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2155.982765] env[62508]: DEBUG nova.objects.instance [None req-2b25dd57-fe01-431f-a29d-8f987153bec6 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lazy-loading 'flavor' on Instance uuid e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2156.991450] env[62508]: DEBUG oslo_concurrency.lockutils [None req-2b25dd57-fe01-431f-a29d-8f987153bec6 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.213s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2158.018938] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2158.019313] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2158.019416] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2158.019661] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2158.019882] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2158.022034] env[62508]: INFO nova.compute.manager [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Terminating instance [ 2158.023890] env[62508]: DEBUG nova.compute.manager [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2158.024148] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2158.025053] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-547f2e19-c3e5-4a1e-82f9-caefbd639665 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.033154] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2158.033419] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e884153-2cb0-4fbb-ac69-567b48f4d00b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.039838] env[62508]: DEBUG oslo_vmware.api [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2158.039838] env[62508]: value = "task-1777036" [ 2158.039838] env[62508]: _type = "Task" [ 2158.039838] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2158.047652] env[62508]: DEBUG oslo_vmware.api [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777036, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.549571] env[62508]: DEBUG oslo_vmware.api [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777036, 'name': PowerOffVM_Task, 'duration_secs': 0.162919} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2158.549886] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2158.550073] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2158.550324] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-29063ae4-9ce4-4086-a5e6-19704fd4ef74 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.624756] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2158.625029] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2158.625188] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Deleting the datastore file [datastore1] e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2158.625458] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bca893f5-04d9-4cd3-99c0-0af2cd6544c2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.631215] env[62508]: DEBUG oslo_vmware.api [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2158.631215] env[62508]: value = "task-1777038" [ 2158.631215] 
env[62508]: _type = "Task" [ 2158.631215] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2158.638873] env[62508]: DEBUG oslo_vmware.api [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777038, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.141664] env[62508]: DEBUG oslo_vmware.api [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777038, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134508} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2159.142130] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2159.142130] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2159.142292] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2159.142459] env[62508]: INFO nova.compute.manager [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2159.142697] env[62508]: DEBUG oslo.service.loopingcall [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2159.142886] env[62508]: DEBUG nova.compute.manager [-] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2159.142983] env[62508]: DEBUG nova.network.neutron [-] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2159.565349] env[62508]: DEBUG nova.compute.manager [req-4fb5d64c-f310-4805-9f3c-4c56884777f8 req-db2dd5d2-ea48-4b05-9dcb-26ca0cef3da3 service nova] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Received event network-vif-deleted-b3cf3d80-1055-4a42-b10b-0e30c8b42000 {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2159.565567] env[62508]: INFO nova.compute.manager [req-4fb5d64c-f310-4805-9f3c-4c56884777f8 req-db2dd5d2-ea48-4b05-9dcb-26ca0cef3da3 service nova] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Neutron deleted interface b3cf3d80-1055-4a42-b10b-0e30c8b42000; detaching it from the instance and deleting it from the info cache [ 2159.565735] env[62508]: DEBUG nova.network.neutron [req-4fb5d64c-f310-4805-9f3c-4c56884777f8 req-db2dd5d2-ea48-4b05-9dcb-26ca0cef3da3 service nova] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2160.042807] env[62508]: DEBUG nova.network.neutron [-] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2160.068598] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d5e1d7e4-e4ba-42b3-8436-6bc56eb8bee0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.079490] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29746777-7d15-4f81-9606-87c3c5cbf9e0 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.103247] env[62508]: DEBUG nova.compute.manager [req-4fb5d64c-f310-4805-9f3c-4c56884777f8 req-db2dd5d2-ea48-4b05-9dcb-26ca0cef3da3 service nova] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Detach interface failed, port_id=b3cf3d80-1055-4a42-b10b-0e30c8b42000, reason: Instance e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2160.545951] env[62508]: INFO nova.compute.manager [-] [instance: e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed] Took 1.40 seconds to deallocate network for instance. 
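The attach, detach and terminate episodes above all follow the same pattern: invoke a vCenter task (ReconfigVM_Task, PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task), then poll it until it reports completion, logging "progress is N%" along the way. As a rough illustration of that wait_for_task/_poll_task loop, here is a minimal Python sketch; it is not the oslo.vmware implementation, and the fetch_task_info callable, the poll interval and the timeout are assumptions introduced only for the example.

import time

POLL_INTERVAL = 0.5   # seconds between polls (assumed value, not from the log)


def wait_for_task(task_id, fetch_task_info, timeout=300):
    """Poll a task until it finishes, mirroring the 'progress is N%' log lines."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        # hypothetical helper returning e.g. {'state': 'running', 'progress': 5}
        info = fetch_task_info(task_id)
        if info["state"] == "success":
            return info                       # corresponds to "completed successfully"
        if info["state"] == "error":
            raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
        print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
        time.sleep(POLL_INTERVAL)
    raise TimeoutError(f"task {task_id} did not complete within {timeout}s")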
[ 2161.054099] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2161.054500] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2161.054813] env[62508]: DEBUG nova.objects.instance [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lazy-loading 'resources' on Instance uuid e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2161.589622] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b47f250b-49b0-4d85-917b-b00f69ec1d19 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.597440] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f6e3e4-cad5-4578-88d5-69103a94727e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.628293] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed9ad644-73fb-4739-bf0f-d82f5bfeee76 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.635426] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d7c322-b810-4b79-a17e-635917e194e4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.648616] env[62508]: DEBUG nova.compute.provider_tree [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2162.152301] env[62508]: DEBUG nova.scheduler.client.report [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2162.657657] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 
tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.603s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2162.677735] env[62508]: INFO nova.scheduler.client.report [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Deleted allocations for instance e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed [ 2163.186607] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cdb06327-36ee-43c6-a0e7-c9456ea53984 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "e92bf1b1-4cb9-41b3-98a8-0d0aab1951ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.167s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2164.806732] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "68a51395-bb35-4f3b-b18e-2483f711a2ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2164.807075] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "68a51395-bb35-4f3b-b18e-2483f711a2ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2165.310048] env[62508]: DEBUG nova.compute.manager [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Starting instance... 
{{(pid=62508) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2165.828481] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2165.828765] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2165.830404] env[62508]: INFO nova.compute.claims [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2166.865558] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f5bace-646a-4e77-90ab-1d937c499e22 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.874704] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21620bf0-d0f3-4380-9b48-599857315704 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.904047] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b4e990-6d3b-4c50-9126-cee16cd59942 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.911170] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12008e35-9117-470c-9572-7d6e3063e3f1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.924675] env[62508]: DEBUG nova.compute.provider_tree [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2167.427658] env[62508]: DEBUG nova.scheduler.client.report [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2167.933027] env[62508]: DEBUG oslo_concurrency.lockutils 
[None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.104s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2167.933677] env[62508]: DEBUG nova.compute.manager [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Start building networks asynchronously for instance. {{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2168.439179] env[62508]: DEBUG nova.compute.utils [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2168.440962] env[62508]: DEBUG nova.compute.manager [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Allocating IP information in the background. {{(pid=62508) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2168.441063] env[62508]: DEBUG nova.network.neutron [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] allocate_for_instance() {{(pid=62508) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2168.499206] env[62508]: DEBUG nova.policy [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '66400df997044a7ca8b711be48707221', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '22fda0b7fb924f1d97862bf4124f9c20', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62508) authorize /opt/stack/nova/nova/policy.py:201}} [ 2168.760159] env[62508]: DEBUG nova.network.neutron [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Successfully created port: 64d9abe0-5e58-42fb-bf6b-cceb49a17b4a {{(pid=62508) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2168.944823] env[62508]: DEBUG nova.compute.manager [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Start building block device mappings for instance. 
{{(pid=62508) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2169.956505] env[62508]: DEBUG nova.compute.manager [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Start spawning the instance on the hypervisor. {{(pid=62508) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2169.981804] env[62508]: DEBUG nova.virt.hardware [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:54:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:54:34Z,direct_url=,disk_format='vmdk',id=f81c384b-39f5-44b6-928f-ab9f4bc0a9f7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a593c1556b124090beac642efa68ce00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:54:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2169.982073] env[62508]: DEBUG nova.virt.hardware [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Flavor limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2169.982239] env[62508]: DEBUG nova.virt.hardware [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Image limits 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2169.982423] env[62508]: DEBUG nova.virt.hardware [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Flavor pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2169.982570] env[62508]: DEBUG nova.virt.hardware [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Image pref 0:0:0 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2169.982716] env[62508]: DEBUG nova.virt.hardware [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62508) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2169.982919] env[62508]: DEBUG nova.virt.hardware [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62508) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 2169.983098] env[62508]: DEBUG nova.virt.hardware [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2169.983268] env[62508]: DEBUG nova.virt.hardware [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Got 1 possible topologies {{(pid=62508) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2169.983430] env[62508]: DEBUG nova.virt.hardware [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2169.983603] env[62508]: DEBUG nova.virt.hardware [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62508) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2169.984482] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5feaa6f-93ef-46e4-9a94-ff92db5e2294 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.992527] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47001f21-ce1f-4be9-b626-dfb644139a7d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.121705] env[62508]: DEBUG nova.compute.manager [req-32f6f9fb-c7ee-4cca-94f8-71861419499b req-d3ca7cdd-4bf8-4c2d-b419-81aec4bad1bd service nova] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Received event network-vif-plugged-64d9abe0-5e58-42fb-bf6b-cceb49a17b4a {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2170.121921] env[62508]: DEBUG oslo_concurrency.lockutils [req-32f6f9fb-c7ee-4cca-94f8-71861419499b req-d3ca7cdd-4bf8-4c2d-b419-81aec4bad1bd service nova] Acquiring lock "68a51395-bb35-4f3b-b18e-2483f711a2ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2170.122011] env[62508]: DEBUG oslo_concurrency.lockutils [req-32f6f9fb-c7ee-4cca-94f8-71861419499b req-d3ca7cdd-4bf8-4c2d-b419-81aec4bad1bd service nova] Lock "68a51395-bb35-4f3b-b18e-2483f711a2ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2170.122188] env[62508]: DEBUG oslo_concurrency.lockutils [req-32f6f9fb-c7ee-4cca-94f8-71861419499b req-d3ca7cdd-4bf8-4c2d-b419-81aec4bad1bd service nova] Lock "68a51395-bb35-4f3b-b18e-2483f711a2ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2170.122356] env[62508]: 
DEBUG nova.compute.manager [req-32f6f9fb-c7ee-4cca-94f8-71861419499b req-d3ca7cdd-4bf8-4c2d-b419-81aec4bad1bd service nova] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] No waiting events found dispatching network-vif-plugged-64d9abe0-5e58-42fb-bf6b-cceb49a17b4a {{(pid=62508) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2170.122518] env[62508]: WARNING nova.compute.manager [req-32f6f9fb-c7ee-4cca-94f8-71861419499b req-d3ca7cdd-4bf8-4c2d-b419-81aec4bad1bd service nova] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Received unexpected event network-vif-plugged-64d9abe0-5e58-42fb-bf6b-cceb49a17b4a for instance with vm_state building and task_state spawning. [ 2170.201584] env[62508]: DEBUG nova.network.neutron [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Successfully updated port: 64d9abe0-5e58-42fb-bf6b-cceb49a17b4a {{(pid=62508) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2170.704594] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "refresh_cache-68a51395-bb35-4f3b-b18e-2483f711a2ca" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2170.704774] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquired lock "refresh_cache-68a51395-bb35-4f3b-b18e-2483f711a2ca" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2170.704901] env[62508]: DEBUG nova.network.neutron [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Building network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2171.234410] env[62508]: DEBUG nova.network.neutron [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Instance cache missing network info. 
{{(pid=62508) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2171.360715] env[62508]: DEBUG nova.network.neutron [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Updating instance_info_cache with network_info: [{"id": "64d9abe0-5e58-42fb-bf6b-cceb49a17b4a", "address": "fa:16:3e:6a:8e:54", "network": {"id": "f51db9ea-4de8-40f0-9ace-aac05e474fd2", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-866700916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fda0b7fb924f1d97862bf4124f9c20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64d9abe0-5e", "ovs_interfaceid": "64d9abe0-5e58-42fb-bf6b-cceb49a17b4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2171.863608] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Releasing lock "refresh_cache-68a51395-bb35-4f3b-b18e-2483f711a2ca" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2171.863958] env[62508]: DEBUG nova.compute.manager [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Instance network_info: |[{"id": "64d9abe0-5e58-42fb-bf6b-cceb49a17b4a", "address": "fa:16:3e:6a:8e:54", "network": {"id": "f51db9ea-4de8-40f0-9ace-aac05e474fd2", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-866700916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fda0b7fb924f1d97862bf4124f9c20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64d9abe0-5e", "ovs_interfaceid": "64d9abe0-5e58-42fb-bf6b-cceb49a17b4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62508) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 2171.864431] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:8e:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39a4aca0-934b-4a91-8779-6a4360c3f967', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '64d9abe0-5e58-42fb-bf6b-cceb49a17b4a', 'vif_model': 'vmxnet3'}] {{(pid=62508) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2171.872127] env[62508]: DEBUG oslo.service.loopingcall [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2171.872332] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Creating VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2171.872552] env[62508]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f037d55-8d5b-4ee8-8f11-6833bad7076f {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.892843] env[62508]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2171.892843] env[62508]: value = "task-1777039" [ 2171.892843] env[62508]: _type = "Task" [ 2171.892843] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2171.900438] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777039, 'name': CreateVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.148233] env[62508]: DEBUG nova.compute.manager [req-a2169604-0f48-46f3-8edc-da019269493d req-80db4770-bea3-4fd8-b86a-311d4e903c66 service nova] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Received event network-changed-64d9abe0-5e58-42fb-bf6b-cceb49a17b4a {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2172.148434] env[62508]: DEBUG nova.compute.manager [req-a2169604-0f48-46f3-8edc-da019269493d req-80db4770-bea3-4fd8-b86a-311d4e903c66 service nova] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Refreshing instance network info cache due to event network-changed-64d9abe0-5e58-42fb-bf6b-cceb49a17b4a. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2172.148645] env[62508]: DEBUG oslo_concurrency.lockutils [req-a2169604-0f48-46f3-8edc-da019269493d req-80db4770-bea3-4fd8-b86a-311d4e903c66 service nova] Acquiring lock "refresh_cache-68a51395-bb35-4f3b-b18e-2483f711a2ca" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2172.148794] env[62508]: DEBUG oslo_concurrency.lockutils [req-a2169604-0f48-46f3-8edc-da019269493d req-80db4770-bea3-4fd8-b86a-311d4e903c66 service nova] Acquired lock "refresh_cache-68a51395-bb35-4f3b-b18e-2483f711a2ca" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2172.148957] env[62508]: DEBUG nova.network.neutron [req-a2169604-0f48-46f3-8edc-da019269493d req-80db4770-bea3-4fd8-b86a-311d4e903c66 service nova] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Refreshing network info cache for port 64d9abe0-5e58-42fb-bf6b-cceb49a17b4a {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2172.402312] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777039, 'name': CreateVM_Task} progress is 99%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.826342] env[62508]: DEBUG nova.network.neutron [req-a2169604-0f48-46f3-8edc-da019269493d req-80db4770-bea3-4fd8-b86a-311d4e903c66 service nova] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Updated VIF entry in instance network info cache for port 64d9abe0-5e58-42fb-bf6b-cceb49a17b4a. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2172.826699] env[62508]: DEBUG nova.network.neutron [req-a2169604-0f48-46f3-8edc-da019269493d req-80db4770-bea3-4fd8-b86a-311d4e903c66 service nova] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Updating instance_info_cache with network_info: [{"id": "64d9abe0-5e58-42fb-bf6b-cceb49a17b4a", "address": "fa:16:3e:6a:8e:54", "network": {"id": "f51db9ea-4de8-40f0-9ace-aac05e474fd2", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-866700916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fda0b7fb924f1d97862bf4124f9c20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64d9abe0-5e", "ovs_interfaceid": "64d9abe0-5e58-42fb-bf6b-cceb49a17b4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2172.904638] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777039, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.329484] env[62508]: DEBUG oslo_concurrency.lockutils [req-a2169604-0f48-46f3-8edc-da019269493d req-80db4770-bea3-4fd8-b86a-311d4e903c66 service nova] Releasing lock "refresh_cache-68a51395-bb35-4f3b-b18e-2483f711a2ca" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2173.404607] env[62508]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777039, 'name': CreateVM_Task, 'duration_secs': 1.318507} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2173.405060] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Created VM on the ESX host {{(pid=62508) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2173.405434] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2173.405635] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2173.405970] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2173.406231] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-356edb32-2b51-4d14-8731-dc07365b69ba {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.411065] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2173.411065] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527d60a7-a0ab-4d7e-882d-e0a2fa3c19f4" [ 2173.411065] env[62508]: _type = "Task" [ 2173.411065] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2173.418220] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527d60a7-a0ab-4d7e-882d-e0a2fa3c19f4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.921559] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]527d60a7-a0ab-4d7e-882d-e0a2fa3c19f4, 'name': SearchDatastore_Task, 'duration_secs': 0.010508} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2173.921844] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2173.922103] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Processing image f81c384b-39f5-44b6-928f-ab9f4bc0a9f7 {{(pid=62508) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2173.922333] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2173.922480] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2173.922657] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2173.922904] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf07b2a7-eb78-48ce-8950-72b5d25282f9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.931116] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62508) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2173.931295] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62508) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2173.931953] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-523cdf95-3d6b-4a66-b9b2-8c7ee50dc3d3 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.936676] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2173.936676] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525ba814-b5ee-fdf1-22e3-8f850c444cac" [ 2173.936676] env[62508]: _type = "Task" [ 2173.936676] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2173.943772] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525ba814-b5ee-fdf1-22e3-8f850c444cac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.446561] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]525ba814-b5ee-fdf1-22e3-8f850c444cac, 'name': SearchDatastore_Task, 'duration_secs': 0.008227} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2174.447567] env[62508]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e536f595-c7b4-438a-b145-5d74ab911525 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.452125] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2174.452125] env[62508]: value = "session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5214e11a-7ccf-e2b4-2eca-a8cce7ee4c26" [ 2174.452125] env[62508]: _type = "Task" [ 2174.452125] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2174.459184] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5214e11a-7ccf-e2b4-2eca-a8cce7ee4c26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.962837] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': session[52cfa133-07ae-3c86-1e18-ada035ceaac6]5214e11a-7ccf-e2b4-2eca-a8cce7ee4c26, 'name': SearchDatastore_Task, 'duration_secs': 0.009134} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2174.963156] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2174.963437] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 68a51395-bb35-4f3b-b18e-2483f711a2ca/68a51395-bb35-4f3b-b18e-2483f711a2ca.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2174.963759] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-88c2ce38-eea6-4870-b59b-f9ad43df8dc8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.970446] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2174.970446] env[62508]: value = "task-1777040" [ 2174.970446] env[62508]: _type = "Task" [ 2174.970446] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2174.977761] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777040, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.479710] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777040, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479645} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2175.481598] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7/f81c384b-39f5-44b6-928f-ab9f4bc0a9f7.vmdk to [datastore1] 68a51395-bb35-4f3b-b18e-2483f711a2ca/68a51395-bb35-4f3b-b18e-2483f711a2ca.vmdk {{(pid=62508) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2175.481598] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Extending root virtual disk to 1048576 {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2175.481598] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c73488b8-4100-403f-b7c8-647aa3950335 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.487246] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2175.487246] env[62508]: value = "task-1777041" [ 2175.487246] env[62508]: _type = "Task" [ 2175.487246] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2175.494265] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777041, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.996881] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777041, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062881} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2175.997173] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Extended root virtual disk {{(pid=62508) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2175.997913] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e8c63b-d2e9-4b37-b4c6-6275345150c5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.019172] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 68a51395-bb35-4f3b-b18e-2483f711a2ca/68a51395-bb35-4f3b-b18e-2483f711a2ca.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2176.019391] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb3f6533-8f7a-49d1-bac9-59f22c5dd1c5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.038999] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2176.038999] env[62508]: value = "task-1777042" [ 2176.038999] env[62508]: _type = "Task" [ 2176.038999] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2176.046039] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777042, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.550740] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777042, 'name': ReconfigVM_Task, 'duration_secs': 0.294659} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2176.551155] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 68a51395-bb35-4f3b-b18e-2483f711a2ca/68a51395-bb35-4f3b-b18e-2483f711a2ca.vmdk or device None with type sparse {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2176.551863] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-629ea200-4009-43a8-bd77-dd499b6d6ac5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.558246] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2176.558246] env[62508]: value = "task-1777043" [ 2176.558246] env[62508]: _type = "Task" [ 2176.558246] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2176.565604] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777043, 'name': Rename_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.068313] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777043, 'name': Rename_Task, 'duration_secs': 0.138908} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2177.068584] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Powering on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2177.068826] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8227cea7-b03f-464e-86f9-45ef31487660 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.075363] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2177.075363] env[62508]: value = "task-1777044" [ 2177.075363] env[62508]: _type = "Task" [ 2177.075363] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2177.082643] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777044, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.585661] env[62508]: DEBUG oslo_vmware.api [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777044, 'name': PowerOnVM_Task, 'duration_secs': 0.440804} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2177.586083] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Powered on the VM {{(pid=62508) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2177.586175] env[62508]: INFO nova.compute.manager [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Took 7.63 seconds to spawn the instance on the hypervisor. [ 2177.586342] env[62508]: DEBUG nova.compute.manager [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Checking state {{(pid=62508) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2177.587172] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4547d3c8-36ed-40ef-a20f-18bdba33620c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.105157] env[62508]: INFO nova.compute.manager [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Took 12.29 seconds to build instance. [ 2178.607774] env[62508]: DEBUG oslo_concurrency.lockutils [None req-cc778b72-bf83-4900-801e-f08ab53c223a tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "68a51395-bb35-4f3b-b18e-2483f711a2ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.801s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2179.020256] env[62508]: DEBUG nova.compute.manager [req-15ebbbb8-dc3e-4b0c-b8ec-74677704657c req-6193fcd7-acf8-400b-967b-f97fb24f85e0 service nova] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Received event network-changed-64d9abe0-5e58-42fb-bf6b-cceb49a17b4a {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2179.020609] env[62508]: DEBUG nova.compute.manager [req-15ebbbb8-dc3e-4b0c-b8ec-74677704657c req-6193fcd7-acf8-400b-967b-f97fb24f85e0 service nova] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Refreshing instance network info cache due to event network-changed-64d9abe0-5e58-42fb-bf6b-cceb49a17b4a. 
{{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2179.020998] env[62508]: DEBUG oslo_concurrency.lockutils [req-15ebbbb8-dc3e-4b0c-b8ec-74677704657c req-6193fcd7-acf8-400b-967b-f97fb24f85e0 service nova] Acquiring lock "refresh_cache-68a51395-bb35-4f3b-b18e-2483f711a2ca" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2179.021292] env[62508]: DEBUG oslo_concurrency.lockutils [req-15ebbbb8-dc3e-4b0c-b8ec-74677704657c req-6193fcd7-acf8-400b-967b-f97fb24f85e0 service nova] Acquired lock "refresh_cache-68a51395-bb35-4f3b-b18e-2483f711a2ca" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2179.021593] env[62508]: DEBUG nova.network.neutron [req-15ebbbb8-dc3e-4b0c-b8ec-74677704657c req-6193fcd7-acf8-400b-967b-f97fb24f85e0 service nova] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Refreshing network info cache for port 64d9abe0-5e58-42fb-bf6b-cceb49a17b4a {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2179.762960] env[62508]: DEBUG nova.network.neutron [req-15ebbbb8-dc3e-4b0c-b8ec-74677704657c req-6193fcd7-acf8-400b-967b-f97fb24f85e0 service nova] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Updated VIF entry in instance network info cache for port 64d9abe0-5e58-42fb-bf6b-cceb49a17b4a. {{(pid=62508) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2179.763366] env[62508]: DEBUG nova.network.neutron [req-15ebbbb8-dc3e-4b0c-b8ec-74677704657c req-6193fcd7-acf8-400b-967b-f97fb24f85e0 service nova] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Updating instance_info_cache with network_info: [{"id": "64d9abe0-5e58-42fb-bf6b-cceb49a17b4a", "address": "fa:16:3e:6a:8e:54", "network": {"id": "f51db9ea-4de8-40f0-9ace-aac05e474fd2", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-866700916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fda0b7fb924f1d97862bf4124f9c20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64d9abe0-5e", "ovs_interfaceid": "64d9abe0-5e58-42fb-bf6b-cceb49a17b4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2180.265814] env[62508]: DEBUG oslo_concurrency.lockutils [req-15ebbbb8-dc3e-4b0c-b8ec-74677704657c req-6193fcd7-acf8-400b-967b-f97fb24f85e0 service nova] Releasing lock "refresh_cache-68a51395-bb35-4f3b-b18e-2483f711a2ca" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2183.994136] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic 
task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2183.994136] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2183.994688] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Rebuilding the list of instances to heal {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2184.557728] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "refresh_cache-68a51395-bb35-4f3b-b18e-2483f711a2ca" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2184.557886] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquired lock "refresh_cache-68a51395-bb35-4f3b-b18e-2483f711a2ca" {{(pid=62508) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2184.558089] env[62508]: DEBUG nova.network.neutron [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Forcefully refreshing network info cache for instance {{(pid=62508) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2184.558272] env[62508]: DEBUG nova.objects.instance [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lazy-loading 'info_cache' on Instance uuid 68a51395-bb35-4f3b-b18e-2483f711a2ca {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2186.269218] env[62508]: DEBUG nova.network.neutron [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Updating instance_info_cache with network_info: [{"id": "64d9abe0-5e58-42fb-bf6b-cceb49a17b4a", "address": "fa:16:3e:6a:8e:54", "network": {"id": "f51db9ea-4de8-40f0-9ace-aac05e474fd2", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-866700916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fda0b7fb924f1d97862bf4124f9c20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64d9abe0-5e", "ovs_interfaceid": "64d9abe0-5e58-42fb-bf6b-cceb49a17b4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2186.771682] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Releasing lock "refresh_cache-68a51395-bb35-4f3b-b18e-2483f711a2ca" {{(pid=62508) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2186.771905] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Updated the network info_cache for instance {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2186.772161] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2186.772331] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2186.772462] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2187.994313] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2188.497733] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2188.498034] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2188.498144] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2188.498301] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2188.499230] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee8a325-7886-45d3-b117-672814f64094 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.508194] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8f3270-52e4-4213-98be-97fac5c016bb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.523098] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a3e104ce-5db5-49bf-bbde-894744d85f7e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.529961] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d9c237-f345-452d-b89b-a995eeeb5c85 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.559170] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181132MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2188.559377] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2188.559520] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2189.584472] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Instance 68a51395-bb35-4f3b-b18e-2483f711a2ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62508) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2189.584767] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2189.584826] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2189.611015] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a583d3-cf81-4ce8-9f8c-55cb6d5a4de2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.618450] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7f8f1b-df9e-49a0-85bb-46fe8c1b5ce6 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.648094] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47aa848d-f440-423e-89af-253bcb033fab {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.655044] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e30feb-2c59-41a4-a9f6-ebea1961437a {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.667626] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2190.170633] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2190.676141] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2190.676551] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.117s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2193.676625] env[62508]: DEBUG oslo_service.periodic_task [None 
req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2193.988991] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2193.992655] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2194.993628] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2198.994920] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2203.989545] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2217.238022] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "68a51395-bb35-4f3b-b18e-2483f711a2ca" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2217.239020] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "68a51395-bb35-4f3b-b18e-2483f711a2ca" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2217.742406] env[62508]: DEBUG nova.compute.utils [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Using /dev/sd instead of None {{(pid=62508) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2218.245589] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "68a51395-bb35-4f3b-b18e-2483f711a2ca" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2219.302518] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb 
tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "68a51395-bb35-4f3b-b18e-2483f711a2ca" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2219.302923] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "68a51395-bb35-4f3b-b18e-2483f711a2ca" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2219.303049] env[62508]: INFO nova.compute.manager [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Attaching volume e25ffb87-398b-4ad5-9b2e-ba3563c07315 to /dev/sdb [ 2219.333104] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f81ff68-4889-493d-8a24-301f3d2e37e2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.340320] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c0867f2-9e64-42ea-a25c-733e6516fbc8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.354307] env[62508]: DEBUG nova.virt.block_device [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Updating existing volume attachment record: 764d9bb5-6beb-4928-b442-ae04636e44e2 {{(pid=62508) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2223.896637] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Volume attach. 
Driver type: vmdk {{(pid=62508) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2223.896953] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368874', 'volume_id': 'e25ffb87-398b-4ad5-9b2e-ba3563c07315', 'name': 'volume-e25ffb87-398b-4ad5-9b2e-ba3563c07315', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '68a51395-bb35-4f3b-b18e-2483f711a2ca', 'attached_at': '', 'detached_at': '', 'volume_id': 'e25ffb87-398b-4ad5-9b2e-ba3563c07315', 'serial': 'e25ffb87-398b-4ad5-9b2e-ba3563c07315'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2223.897797] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c43a6ac-b7b0-4f0b-9751-de8fd0c6ab24 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.913710] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c712466-44cc-4e58-9f45-152920ba67a8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.939079] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] volume-e25ffb87-398b-4ad5-9b2e-ba3563c07315/volume-e25ffb87-398b-4ad5-9b2e-ba3563c07315.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2223.939322] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c9da9bf-10de-4f60-9a14-1cff8c3bd4c5 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.957234] env[62508]: DEBUG oslo_vmware.api [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2223.957234] env[62508]: value = "task-1777047" [ 2223.957234] env[62508]: _type = "Task" [ 2223.957234] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2223.965129] env[62508]: DEBUG oslo_vmware.api [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777047, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2224.469430] env[62508]: DEBUG oslo_vmware.api [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777047, 'name': ReconfigVM_Task, 'duration_secs': 0.326752} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2224.469821] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Reconfigured VM instance instance-0000007c to attach disk [datastore1] volume-e25ffb87-398b-4ad5-9b2e-ba3563c07315/volume-e25ffb87-398b-4ad5-9b2e-ba3563c07315.vmdk or device None with type thin {{(pid=62508) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2224.474611] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed869061-f11c-4162-a961-6736249d1ac1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.489352] env[62508]: DEBUG oslo_vmware.api [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2224.489352] env[62508]: value = "task-1777048" [ 2224.489352] env[62508]: _type = "Task" [ 2224.489352] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2224.497368] env[62508]: DEBUG oslo_vmware.api [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777048, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2224.999092] env[62508]: DEBUG oslo_vmware.api [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777048, 'name': ReconfigVM_Task, 'duration_secs': 0.133161} completed successfully. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2224.999415] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368874', 'volume_id': 'e25ffb87-398b-4ad5-9b2e-ba3563c07315', 'name': 'volume-e25ffb87-398b-4ad5-9b2e-ba3563c07315', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '68a51395-bb35-4f3b-b18e-2483f711a2ca', 'attached_at': '', 'detached_at': '', 'volume_id': 'e25ffb87-398b-4ad5-9b2e-ba3563c07315', 'serial': 'e25ffb87-398b-4ad5-9b2e-ba3563c07315'} {{(pid=62508) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2226.035697] env[62508]: DEBUG nova.objects.instance [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lazy-loading 'flavor' on Instance uuid 68a51395-bb35-4f3b-b18e-2483f711a2ca {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2226.542046] env[62508]: DEBUG oslo_concurrency.lockutils [None req-1e989992-8ca9-4f4d-bf7f-e50328364aeb tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "68a51395-bb35-4f3b-b18e-2483f711a2ca" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.239s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2226.706211] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5eaad923-df55-484c-b62c-7d6755288324 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "68a51395-bb35-4f3b-b18e-2483f711a2ca" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2226.706475] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5eaad923-df55-484c-b62c-7d6755288324 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "68a51395-bb35-4f3b-b18e-2483f711a2ca" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2227.209896] env[62508]: INFO nova.compute.manager [None req-5eaad923-df55-484c-b62c-7d6755288324 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Detaching volume e25ffb87-398b-4ad5-9b2e-ba3563c07315 [ 2227.239756] env[62508]: INFO nova.virt.block_device [None req-5eaad923-df55-484c-b62c-7d6755288324 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Attempting to driver detach volume e25ffb87-398b-4ad5-9b2e-ba3563c07315 from mountpoint /dev/sdb [ 2227.239963] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-5eaad923-df55-484c-b62c-7d6755288324 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 
68a51395-bb35-4f3b-b18e-2483f711a2ca] Volume detach. Driver type: vmdk {{(pid=62508) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2227.240178] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-5eaad923-df55-484c-b62c-7d6755288324 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368874', 'volume_id': 'e25ffb87-398b-4ad5-9b2e-ba3563c07315', 'name': 'volume-e25ffb87-398b-4ad5-9b2e-ba3563c07315', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '68a51395-bb35-4f3b-b18e-2483f711a2ca', 'attached_at': '', 'detached_at': '', 'volume_id': 'e25ffb87-398b-4ad5-9b2e-ba3563c07315', 'serial': 'e25ffb87-398b-4ad5-9b2e-ba3563c07315'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2227.241067] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0dde32f-db2d-4694-8198-0ed1e41df43e {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.262653] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a45df21-3fbb-45b9-9f88-f22a661f49f4 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.269513] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083db264-1744-428f-a544-b59a094bf264 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.289996] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a204f8e-fee3-45f7-a261-b9554aec75b8 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.304169] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-5eaad923-df55-484c-b62c-7d6755288324 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] The volume has not been displaced from its original location: [datastore1] volume-e25ffb87-398b-4ad5-9b2e-ba3563c07315/volume-e25ffb87-398b-4ad5-9b2e-ba3563c07315.vmdk. No consolidation needed. 
{{(pid=62508) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2227.309297] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-5eaad923-df55-484c-b62c-7d6755288324 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Reconfiguring VM instance instance-0000007c to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2227.309544] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0de0b65b-874e-4eae-b99f-0848495a693b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.327489] env[62508]: DEBUG oslo_vmware.api [None req-5eaad923-df55-484c-b62c-7d6755288324 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2227.327489] env[62508]: value = "task-1777049" [ 2227.327489] env[62508]: _type = "Task" [ 2227.327489] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2227.334805] env[62508]: DEBUG oslo_vmware.api [None req-5eaad923-df55-484c-b62c-7d6755288324 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777049, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2227.836948] env[62508]: DEBUG oslo_vmware.api [None req-5eaad923-df55-484c-b62c-7d6755288324 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777049, 'name': ReconfigVM_Task, 'duration_secs': 0.227567} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2227.837248] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-5eaad923-df55-484c-b62c-7d6755288324 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Reconfigured VM instance instance-0000007c to detach disk 2001 {{(pid=62508) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2227.841895] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d3af00c-5b18-4f4f-a236-58356978b626 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.856780] env[62508]: DEBUG oslo_vmware.api [None req-5eaad923-df55-484c-b62c-7d6755288324 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2227.856780] env[62508]: value = "task-1777050" [ 2227.856780] env[62508]: _type = "Task" [ 2227.856780] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2227.864113] env[62508]: DEBUG oslo_vmware.api [None req-5eaad923-df55-484c-b62c-7d6755288324 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777050, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2228.366658] env[62508]: DEBUG oslo_vmware.api [None req-5eaad923-df55-484c-b62c-7d6755288324 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777050, 'name': ReconfigVM_Task, 'duration_secs': 0.126846} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2228.366986] env[62508]: DEBUG nova.virt.vmwareapi.volumeops [None req-5eaad923-df55-484c-b62c-7d6755288324 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368874', 'volume_id': 'e25ffb87-398b-4ad5-9b2e-ba3563c07315', 'name': 'volume-e25ffb87-398b-4ad5-9b2e-ba3563c07315', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '68a51395-bb35-4f3b-b18e-2483f711a2ca', 'attached_at': '', 'detached_at': '', 'volume_id': 'e25ffb87-398b-4ad5-9b2e-ba3563c07315', 'serial': 'e25ffb87-398b-4ad5-9b2e-ba3563c07315'} {{(pid=62508) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2228.906541] env[62508]: DEBUG nova.objects.instance [None req-5eaad923-df55-484c-b62c-7d6755288324 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lazy-loading 'flavor' on Instance uuid 68a51395-bb35-4f3b-b18e-2483f711a2ca {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2229.914958] env[62508]: DEBUG oslo_concurrency.lockutils [None req-5eaad923-df55-484c-b62c-7d6755288324 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "68a51395-bb35-4f3b-b18e-2483f711a2ca" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.208s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2230.944563] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "68a51395-bb35-4f3b-b18e-2483f711a2ca" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2230.944933] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "68a51395-bb35-4f3b-b18e-2483f711a2ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2230.945078] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "68a51395-bb35-4f3b-b18e-2483f711a2ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2230.945270] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "68a51395-bb35-4f3b-b18e-2483f711a2ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2230.945441] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "68a51395-bb35-4f3b-b18e-2483f711a2ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2230.947841] env[62508]: INFO nova.compute.manager [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Terminating instance [ 2230.949369] env[62508]: DEBUG nova.compute.manager [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Start destroying the instance on the hypervisor. {{(pid=62508) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2230.949572] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Destroying instance {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2230.950463] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c4427a-3e69-41a8-b38c-56880423be9b {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.958541] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Powering off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2230.958773] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-600f59a4-5126-44eb-bcbe-8d7bbef9363d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.964512] env[62508]: DEBUG oslo_vmware.api [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2230.964512] env[62508]: value = "task-1777051" [ 2230.964512] env[62508]: _type = "Task" [ 2230.964512] env[62508]: } to complete. 
{{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2230.973508] env[62508]: DEBUG oslo_vmware.api [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777051, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2231.474630] env[62508]: DEBUG oslo_vmware.api [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777051, 'name': PowerOffVM_Task, 'duration_secs': 0.152117} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2231.474895] env[62508]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Powered off the VM {{(pid=62508) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2231.475080] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Unregistering the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2231.475330] env[62508]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5610e3f6-c5e2-490a-8622-18173f97e523 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.547558] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Unregistered the VM {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2231.547809] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Deleting contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2231.548044] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Deleting the datastore file [datastore1] 68a51395-bb35-4f3b-b18e-2483f711a2ca {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2231.548317] env[62508]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31bff183-4050-45e4-ad08-2461f5ede1cf {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.554677] env[62508]: DEBUG oslo_vmware.api [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for the task: (returnval){ [ 2231.554677] env[62508]: value = "task-1777053" [ 2231.554677] 
env[62508]: _type = "Task" [ 2231.554677] env[62508]: } to complete. {{(pid=62508) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2231.562314] env[62508]: DEBUG oslo_vmware.api [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777053, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2232.064947] env[62508]: DEBUG oslo_vmware.api [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Task: {'id': task-1777053, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126977} completed successfully. {{(pid=62508) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2232.065355] env[62508]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Deleted the datastore file {{(pid=62508) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2232.065412] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Deleted contents of the VM from datastore datastore1 {{(pid=62508) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2232.065686] env[62508]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Instance destroyed {{(pid=62508) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2232.065909] env[62508]: INFO nova.compute.manager [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2232.066209] env[62508]: DEBUG oslo.service.loopingcall [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62508) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2232.066444] env[62508]: DEBUG nova.compute.manager [-] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Deallocating network for instance {{(pid=62508) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2232.066568] env[62508]: DEBUG nova.network.neutron [-] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] deallocate_for_instance() {{(pid=62508) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2232.512829] env[62508]: DEBUG nova.compute.manager [req-7432d920-4f77-487d-ba75-b566a189738f req-2789b1b1-8480-43da-b52d-a944f8c1a1eb service nova] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Received event network-vif-deleted-64d9abe0-5e58-42fb-bf6b-cceb49a17b4a {{(pid=62508) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2232.512999] env[62508]: INFO nova.compute.manager [req-7432d920-4f77-487d-ba75-b566a189738f req-2789b1b1-8480-43da-b52d-a944f8c1a1eb service nova] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Neutron deleted interface 64d9abe0-5e58-42fb-bf6b-cceb49a17b4a; detaching it from the instance and deleting it from the info cache [ 2232.513182] env[62508]: DEBUG nova.network.neutron [req-7432d920-4f77-487d-ba75-b566a189738f req-2789b1b1-8480-43da-b52d-a944f8c1a1eb service nova] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2232.953792] env[62508]: DEBUG nova.network.neutron [-] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Updating instance_info_cache with network_info: [] {{(pid=62508) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2233.016167] env[62508]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e2d0e6eb-231c-4d06-ba69-7baec3366253 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.026011] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd48f99-a08b-4d0f-8f55-8f7c36bd167d {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.050357] env[62508]: DEBUG nova.compute.manager [req-7432d920-4f77-487d-ba75-b566a189738f req-2789b1b1-8480-43da-b52d-a944f8c1a1eb service nova] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Detach interface failed, port_id=64d9abe0-5e58-42fb-bf6b-cceb49a17b4a, reason: Instance 68a51395-bb35-4f3b-b18e-2483f711a2ca could not be found. {{(pid=62508) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2233.456361] env[62508]: INFO nova.compute.manager [-] [instance: 68a51395-bb35-4f3b-b18e-2483f711a2ca] Took 1.39 seconds to deallocate network for instance. 
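The ReconfigVM_Task, PowerOffVM_Task and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware pattern: the driver invokes a vSphere method that returns a Task managed object, then blocks in wait_for_task() while the library polls it (the "Waiting for the task", "progress is N%" and "completed successfully" lines). A minimal sketch of that pattern follows; it is illustrative only, assumes an already-constructed oslo.vmware session (such as the VMwareAPISession created at startup) and an existing VM managed-object reference, and is not code taken from Nova itself.

    # Illustrative sketch (not from the log or the Nova source tree): invoke a
    # vSphere *_Task method through an existing oslo.vmware session and wait
    # for it, which is what produces the "Waiting for the task" /
    # "progress is N%" / "completed successfully" entries above.
    from oslo_vmware import exceptions as vmware_exceptions

    def power_off_and_wait(session, vm_ref):
        """Power off a VM and block until the vSphere task finishes."""
        # Returns a Task managed-object reference (e.g. task-1777051 above).
        task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        try:
            # oslo.vmware polls the task for us (the _poll_task entries) and
            # raises if the task ends in an error state.
            return session.wait_for_task(task_ref)
        except vmware_exceptions.VimException:
            # The task failed on the vCenter side; let the caller decide
            # whether to retry or surface the failure.
            raise
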
[ 2233.962775] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2233.963065] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2233.963307] env[62508]: DEBUG nova.objects.instance [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lazy-loading 'resources' on Instance uuid 68a51395-bb35-4f3b-b18e-2483f711a2ca {{(pid=62508) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2234.496034] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3fd4c55-b865-40fe-a06f-c37474d68688 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.503591] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e9af47-08c3-42e9-a2c5-8236f43d90e9 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.534844] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a302362a-cf69-4071-a8c9-124e41ee4e3c {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.541617] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3724ecc-df01-49e5-9c8f-3fbea0ec9233 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.554424] env[62508]: DEBUG nova.compute.provider_tree [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2235.057673] env[62508]: DEBUG nova.scheduler.client.report [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2235.563147] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 
tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.600s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2235.597120] env[62508]: INFO nova.scheduler.client.report [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Deleted allocations for instance 68a51395-bb35-4f3b-b18e-2483f711a2ca [ 2236.104645] env[62508]: DEBUG oslo_concurrency.lockutils [None req-ff2fb9ae-5a1d-4906-9940-ae4728e155f5 tempest-AttachVolumeNegativeTest-489560781 tempest-AttachVolumeNegativeTest-489560781-project-member] Lock "68a51395-bb35-4f3b-b18e-2483f711a2ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.160s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2243.993038] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2243.993318] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Starting heal instance info cache {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2243.993375] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Rebuilding the list of instances to heal {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2244.497054] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Didn't find any instances for network info cache update. {{(pid=62508) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 2245.993784] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2246.993546] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2246.993546] env[62508]: DEBUG nova.compute.manager [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62508) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2248.993878] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2249.497360] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2249.497641] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2249.497784] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2249.497933] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62508) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2249.498883] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce49290f-bf85-4372-8b1a-e8650c8a2ca1 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.509224] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a514e4-37ea-443a-9201-c908c66fd8b2 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.524670] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec8bc19-848d-4965-84ab-41e7bc3978a7 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.531653] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aceafa4-f80f-498c-a5d4-ea869328ab63 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.560904] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180935MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62508) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2249.561097] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2249.561286] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2250.581776] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2250.582053] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62508) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2250.596727] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b434c2ce-ced3-485c-97bf-56e33adc4fab {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.604498] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7cfa026-cfd6-440f-89ed-5bbfc1b32344 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.634641] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c81ac26-8459-47be-a0f6-900550308edb {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.641989] env[62508]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99eb0fa-2686-48a4-a0f2-93868b865b46 {{(pid=62508) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.655012] env[62508]: DEBUG nova.compute.provider_tree [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed in ProviderTree for provider: 5d5b4923-a8ac-4688-9f86-2405bd3406a9 {{(pid=62508) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2251.158502] env[62508]: DEBUG nova.scheduler.client.report [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Inventory has not changed for provider 5d5b4923-a8ac-4688-9f86-2405bd3406a9 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62508) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2251.663543] env[62508]: DEBUG nova.compute.resource_tracker [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62508) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2251.663961] env[62508]: DEBUG oslo_concurrency.lockutils [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.102s {{(pid=62508) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2254.664518] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2254.993662] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2255.989438] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2255.992991] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2260.993547] env[62508]: DEBUG oslo_service.periodic_task [None req-153dc56c-c75a-4962-bda3-f7ce91a81c10 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62508) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}